* Static CI checks

- Fix style of image-copy
- Add PR template
- Add license and codeowner checks
- pylint/pep8

* Use build stages correctly
This commit is contained in:
Derek Bekoe 2017-11-01 10:20:00 -07:00 коммит произвёл GitHub
Родитель dd1b6e64d5
Коммит 4a81d88911
Не найден ключ, соответствующий данной подписи
Идентификатор ключа GPG: 4AEE18F83AFDEB23
15 изменённых файлов: 324 добавлений и 109 удалений

11
.flake8 Normal file
Просмотреть файл

@ -0,0 +1,11 @@
[flake8]
max-line-length = 120
max-complexity = 10
ignore =
E126,
E501,
E722,
E741,
F401,
F811,
C901

3
.github/CODEOWNERS поставляемый
Просмотреть файл

@ -1,4 +1,5 @@
# For instructions on this file, see https://help.github.com/articles/about-codeowners/
index.json @derekbekoe
/src/index.json @derekbekoe
/src/image-copy/ @tamirkamara

7
.github/PULL_REQUEST_TEMPLATE.md поставляемый Normal file
Просмотреть файл

@ -0,0 +1,7 @@
---
This checklist is used to make sure that common guidelines for a pull request are followed.
### General Guidelines
- [ ] Have you run `./scripts/ci/test_static.sh` locally? (`pip install pylint flake8` required)

3
.gitignore поставляемый
Просмотреть файл

@ -97,5 +97,8 @@ ENV/
# mkdocs documentation
/site
# VS Code
.vscode/settings.json
# mypy
.mypy_cache/

Просмотреть файл

@ -1,10 +1,15 @@
dist: trusty
sudo: off
language: python
python:
- "2.7"
- "3.5"
- "3.6"
install: true
script:
- ls
install:
- pip install pylint flake8
jobs:
include:
- stage: verify
script: ./scripts/ci/test_static.sh
env: PURPOSE='VerifySource-StaticCheck'
python: 3.6
- stage: verify
script: ./scripts/ci/test_static.sh
env: PURPOSE='VerifySource-StaticCheck'
python: 2.7

9
CONTRIBUTING.rst Normal file
Просмотреть файл

@ -0,0 +1,9 @@
Contribute Code
===================================
This project has adopted the `Microsoft Open Source Code of Conduct <https://opensource.microsoft.com/codeofconduct/>`__.
For more information see the `Code of Conduct FAQ <https://opensource.microsoft.com/codeofconduct/faq/>`__ or contact `opencode@microsoft.com <mailto:opencode@microsoft.com>`__ with any additional questions or comments.
If you would like to become an active contributor to this project please
follow the instructions provided in `Microsoft Azure Projects Contribution Guidelines <http://azure.github.io/guidelines.html>`__

48
pylintrc Normal file
Просмотреть файл

@ -0,0 +1,48 @@
[MASTER]
reports=no
score=no
[MESSAGES CONTROL]
# For all codes, run 'pylint --list-msgs' or go to 'https://pylint.readthedocs.io/en/latest/reference_guide/features.html'
# locally-disabled: Warning locally suppressed using disable-msg
# cyclic-import: because of https://github.com/PyCQA/pylint/issues/850
# too-many-arguments: Due to the nature of the CLI many commands have large arguments set which reflect in large arguments set in corresponding methods.
disable=missing-docstring,locally-disabled,fixme,cyclic-import,too-many-arguments,invalid-name,duplicate-code
[TYPECHECK]
# For Azure CLI extensions, we ignore import errors for azure.cli as they'll be available in the environment of the CLI
ignored-modules=azure.cli
[FORMAT]
max-line-length=120
[VARIABLES]
# Tells whether we should check for unused import in __init__ files.
init-import=yes
[DESIGN]
# Maximum number of locals for function / method body
max-locals=25
# Maximum number of branch for function / method body
max-branches=20
[SIMILARITIES]
min-similarity-lines=10
[BASIC]
# Naming hints based on PEP 8 (https://www.python.org/dev/peps/pep-0008/#naming-conventions).
# Consider these guidelines and not hard rules. Read PEP 8 for more details.
# The invalid-name checker must be **enabled** for these hints to be used.
include-naming-hint=yes
module-name-hint=lowercase (keep short; underscores are discouraged)
const-name-hint=UPPER_CASE_WITH_UNDERSCORES
class-name-hint=CapitalizedWords
class-attribute-name-hint=lower_case_with_underscores
attr-name-hint=lower_case_with_underscores
method-name-hint=lower_case_with_underscores
function-name-hint=lower_case_with_underscores
argument-name-hint=lower_case_with_underscores
variable-name-hint=lower_case_with_underscores
inlinevar-name-hint=lower_case_with_underscores (short is OK)

12
scripts/ci/test_static.sh Executable file
Просмотреть файл

@ -0,0 +1,12 @@
#!/usr/bin/env bash
# Static analysis checks for CI: linting plus repo-metadata verification.
# Requires: pip install pylint flake8

# Abort on the first failing check.
set -e

# Run pylint in parallel, one worker per available CPU core.
# $(...) is preferred over legacy backticks; quote the expansion.
proc_number=$(python -c 'import multiprocessing; print(multiprocessing.cpu_count())')

# Lint the extension source packages.
pylint ./src/*/azext_*/ --rcfile=./pylintrc -j "$proc_number"
flake8 --statistics --append-config=./.flake8 ./src/*/azext_*/

# Lint the CI scripts themselves.
pylint ./scripts/ci/*.py --rcfile=./pylintrc
flake8 --append-config=./.flake8 ./scripts/ci/*.py

# Verify repo metadata: CODEOWNERS coverage and license headers.
python ./scripts/ci/verify_codeowners.py
python ./scripts/ci/verify_license.py

13
scripts/ci/util.py Normal file
Просмотреть файл

@ -0,0 +1,13 @@
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
import os
def get_repo_root(marker='CONTRIBUTING.rst', start_dir=None):
    """Return the absolute path of the repository root directory.

    Walks up from *start_dir* (default: this file's directory) until a
    directory containing *marker* is found.

    :param marker: file name identifying the repo root (default CONTRIBUTING.rst).
    :param start_dir: directory to start the upward search from.
    :raises OSError: if no ancestor directory contains *marker*.
    """
    if start_dir is None:
        start_dir = os.path.dirname(os.path.abspath(__file__))
    current_dir = os.path.abspath(start_dir)
    while not os.path.exists(os.path.join(current_dir, marker)):
        parent = os.path.dirname(current_dir)
        if parent == current_dir:
            # Reached the filesystem root (dirname('/') == '/'); without this
            # guard the original loop would spin forever when marker is absent.
            raise OSError('{0} not found in any ancestor of {1}'.format(marker, start_dir))
        current_dir = parent
    return current_dir

Просмотреть файл

@ -0,0 +1,42 @@
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
from __future__ import print_function
import os
import sys
from util import get_repo_root
REPO_ROOT = get_repo_root()
CODEOWNERS = os.path.join(REPO_ROOT, '.github', 'CODEOWNERS')
SRC_DIR = os.path.join(REPO_ROOT, 'src')
def get_src_dir_codeowners():
    """Return a dict mapping each /src/ directory entry in CODEOWNERS to its owner(s).

    Only lines starting with '/src/' whose path token ends in '/' (i.e.
    directory entries) are included. The value is the remainder of the line,
    so entries listing several owners are preserved instead of raising
    (the original ``dict([x.split(' ') ...])`` failed with ValueError on
    lines like '/src/foo/ @a @b').
    """
    with open(CODEOWNERS) as f:
        lines = [line.strip() for line in f]
    owners = {}
    for line in lines:
        parts = line.split()
        if line.startswith('/src/') and parts and parts[0].endswith('/'):
            # Single-owner lines yield the same value as before; multi-owner
            # lines now map the path to the space-joined owner list.
            owners[parts[0]] = ' '.join(parts[1:])
    return owners
def main():
    """Verify that CODEOWNERS covers every extension directory under src/.

    Exits with status 1, printing details to stderr, when CODEOWNERS lists a
    directory that no longer exists (dangling) or when a directory under
    src/ has no CODEOWNERS entry (missing).
    """
    owners = get_src_dir_codeowners()
    # Entries in CODEOWNERS whose directory no longer exists in the repo.
    # e[1:] strips the leading '/' so the path joins relative to REPO_ROOT.
    dangling_entries = [e for e in owners if not os.path.isdir(os.path.join(REPO_ROOT, e[1:]))]
    # Directories under src/ that have no CODEOWNERS entry.
    missing_entries = ['/src/{}/'.format(p) for p in os.listdir(SRC_DIR)
                       if os.path.isdir(os.path.join(SRC_DIR, p)) and '/src/{}/'.format(p) not in owners]
    if dangling_entries or missing_entries:
        # All diagnostics go to stderr; the original sent this summary line to
        # stdout while the detail lines below went to stderr, splitting output.
        print('Errors whilst verifying {}!'.format(CODEOWNERS), file=sys.stderr)
        if dangling_entries:
            print("Remove the following {} as these directories don't exist.".format(dangling_entries),
                  file=sys.stderr)
        if missing_entries:
            print("The following directories are missing codeowners {}.".format(missing_entries),
                  file=sys.stderr)
        sys.exit(1)


if __name__ == '__main__':
    main()

Просмотреть файл

@ -0,0 +1,44 @@
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
from __future__ import print_function
import os
import sys
from util import get_repo_root
REPO_ROOT = get_repo_root()
SRC_DIR = os.path.join(REPO_ROOT, 'src')
LICENSE_HEADER = """# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
"""
def main():
    """Check that every non-empty Python file in the repo has the license header.

    Skips everything under the 'env' virtual-environment directory. Exits
    with status 1 and lists offending files on stderr.
    """
    env_path = os.path.join(REPO_ROOT, 'env')
    files_without_header = []
    # Reuse the module-level REPO_ROOT; the original re-ran get_repo_root()
    # here, recomputing the same value.
    for current_dir, _, files in os.walk(REPO_ROOT):
        if current_dir.startswith(env_path):
            continue
        file_itr = (os.path.join(current_dir, p) for p in files if p.endswith('.py'))
        for python_file in file_itr:
            with open(python_file, 'r') as f:
                file_text = f.read()
            # Empty files (e.g. bare __init__.py) are exempt from the header.
            if file_text and LICENSE_HEADER not in file_text:
                # python_file is already a full path; the original joined it
                # with current_dir again, which was redundant.
                files_without_header.append(python_file)
    if files_without_header:
        print("Error: The following files don't have the required license headers: \n{}".format(
            '\n'.join(files_without_header)), file=sys.stderr)
        sys.exit(1)


if __name__ == '__main__':
    main()

Просмотреть файл

@ -8,19 +8,29 @@ from azure.cli.core.sdk.util import ParametersContext
helps['image copy'] = """
type: command
short-summary: Allows to copy a managed image (or vm) to other regions. Keep in mind that it requires the source disk to be available.
short-summary: Copy a managed image (or vm) to other regions
long-summary: >
Allows to copy a managed image (or vm) to other regions.
Keep in mind that it requires the source disk to be available.
"""
def load_params(_):
with ParametersContext('image copy') as c:
c.register('source_resource_group_name', '--source-resource-group', help='Name of the resource group of the source resource')
c.register('source_object_name', '--source-object-name', help='The name of the image or vm resource')
c.register('target_location', '--target-location', nargs='+', help='Space separated location list to create the image in (use location short codes like westeurope etc.)')
c.register('source_resource_group_name', '--source-resource-group',
help='Name of the resource group of the source resource')
c.register('source_object_name', '--source-object-name',
help='The name of the image or vm resource')
c.register('target_location', '--target-location', nargs='+',
help='Space separated location list to create the image in (e.g. westeurope etc.)')
c.register('source_type', '--source-type', default='image', choices=['image', 'vm'], help='image or vm')
c.register('target_resource_group_name', '--target-resource-group', help='Name of the resource group to create images in')
c.register('parallel_degree', '--parallel-degree', type=int, default=-1, help='Number of parallel copy operations')
c.register('cleanup', '--cleanup', action='store_true', default=False, \
help='Include this switch to delete temporary resources upon completion')
c.register('target_resource_group_name', '--target-resource-group',
help='Name of the resource group to create images in')
c.register('parallel_degree', '--parallel-degree', type=int, default=-1,
help='Number of parallel copy operations')
c.register('cleanup', '--cleanup', action='store_true', default=False,
help='Include this switch to delete temporary resources upon completion')
def load_commands():
from azure.cli.core.commands import cli_command

Просмотреть файл

@ -1,3 +1,8 @@
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
import sys
import json
@ -29,6 +34,7 @@ def run_cli_command(cmd, return_as_json=False):
logger.error('command ended with an error: %s', cmd)
raise
def prepare_cli_command(cmd, output_as_json=True):
full_cmd = [sys.executable, '-m', 'azure.cli'] + cmd

Просмотреть файл

@ -1,3 +1,8 @@
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
import hashlib
import datetime
import time
@ -10,33 +15,33 @@ logger = azlogging.get_az_logger(__name__)
PROGRESS_LINE_LENGTH = 40
def create_target_image(location, transient_resource_group_name, source_type, source_object_name, \
source_os_disk_snapshot_name, source_os_disk_snapshot_url, source_os_type, \
target_resource_group_name, azure_pool_frequency):
# pylint: disable=too-many-locals
def create_target_image(location, transient_resource_group_name, source_type, source_object_name,
source_os_disk_snapshot_name, source_os_disk_snapshot_url, source_os_type,
target_resource_group_name, azure_pool_frequency):
subscription_id = get_subscription_id()
subscription_hash = hashlib.sha1(subscription_id.encode("UTF-8")).hexdigest()
unique_subscription_string = subscription_hash[:7]
# create the target storage account
logger.warn("{0} - Creating target storage account (can be slow sometimes)".format(location))
target_storage_account_name = location + unique_subscription_string
cmd = prepare_cli_command(['storage', 'account', 'create', \
'--name', target_storage_account_name, \
'--resource-group', transient_resource_group_name, \
'--location', location, \
'--sku', 'Standard_LRS'])
cmd = prepare_cli_command(['storage', 'account', 'create',
'--name', target_storage_account_name,
'--resource-group', transient_resource_group_name,
'--location', location,
'--sku', 'Standard_LRS'])
json_output = run_cli_command(cmd, return_as_json=True)
target_blob_endpoint = json_output['primaryEndpoints']['blob']
# Setup the target storage account
cmd = prepare_cli_command(['storage', 'account', 'keys', 'list', \
'--account-name', target_storage_account_name, \
'--resource-group', transient_resource_group_name])
cmd = prepare_cli_command(['storage', 'account', 'keys', 'list',
'--account-name', target_storage_account_name,
'--resource-group', transient_resource_group_name])
json_output = run_cli_command(cmd, return_as_json=True)
@ -46,59 +51,55 @@ def create_target_image(location, transient_resource_group_name, source_type, so
expiry_format = "%Y-%m-%dT%H:%MZ"
expiry = datetime.datetime.utcnow() + datetime.timedelta(hours=1)
cmd = prepare_cli_command(['storage', 'account', 'generate-sas', \
'--account-name', target_storage_account_name, \
'--account-key', target_storage_account_key, \
'--expiry', expiry.strftime(expiry_format), \
'--permissions', 'aclrpuw', '--resource-types', \
'sco', '--services', 'b', '--https-only'], \
output_as_json=False)
cmd = prepare_cli_command(['storage', 'account', 'generate-sas',
'--account-name', target_storage_account_name,
'--account-key', target_storage_account_key,
'--expiry', expiry.strftime(expiry_format),
'--permissions', 'aclrpuw', '--resource-types',
'sco', '--services', 'b', '--https-only'],
output_as_json=False)
sas_token = run_cli_command(cmd)
sas_token = sas_token.rstrip("\n\r") #STRANGE
sas_token = sas_token.rstrip("\n\r") # STRANGE
logger.debug("sas token: " + sas_token)
# create a container in the target blob storage account
logger.warn("{0} - Creating container in the target storage account".format(location))
target_container_name = 'snapshots'
cmd = prepare_cli_command(['storage', 'container', 'create', \
'--name', target_container_name, \
'--account-name', target_storage_account_name])
cmd = prepare_cli_command(['storage', 'container', 'create',
'--name', target_container_name,
'--account-name', target_storage_account_name])
run_cli_command(cmd)
# Copy the snapshot to the target region using the SAS URL
blob_name = source_os_disk_snapshot_name + '.vhd'
logger.warn("{0} - Copying blob to target storage account".format(location))
cmd = prepare_cli_command(['storage', 'blob', 'copy', 'start', \
'--source-uri', source_os_disk_snapshot_url, \
'--destination-blob', blob_name, \
'--destination-container', target_container_name, \
'--account-name', target_storage_account_name, \
'--sas-token', sas_token])
cmd = prepare_cli_command(['storage', 'blob', 'copy', 'start',
'--source-uri', source_os_disk_snapshot_url,
'--destination-blob', blob_name,
'--destination-container', target_container_name,
'--account-name', target_storage_account_name,
'--sas-token', sas_token])
run_cli_command(cmd)
# Wait for the copy to complete
start_datetime = datetime.datetime.now()
wait_for_blob_copy_operation(blob_name, target_container_name, \
target_storage_account_name, azure_pool_frequency, location)
msg = "{0} - Copy time: {1}".format(location, datetime.datetime.now()-start_datetime).ljust(PROGRESS_LINE_LENGTH)
wait_for_blob_copy_operation(blob_name, target_container_name,
target_storage_account_name, azure_pool_frequency, location)
msg = "{0} - Copy time: {1}".format(location, datetime.datetime.now() - start_datetime).ljust(PROGRESS_LINE_LENGTH)
logger.warn(msg)
# Create the snapshot in the target region from the copied blob
logger.warn("{0} - Creating snapshot in target region from the copied blob".format(location))
target_blob_path = target_blob_endpoint + target_container_name + '/' + blob_name
target_snapshot_name = source_os_disk_snapshot_name + '-' + location
cmd = prepare_cli_command(['snapshot', 'create', \
'--resource-group', transient_resource_group_name, \
'--name', target_snapshot_name, \
'--location', location, \
'--source', target_blob_path])
cmd = prepare_cli_command(['snapshot', 'create',
'--resource-group', transient_resource_group_name,
'--name', target_snapshot_name,
'--location', location,
'--source', target_blob_path])
json_output = run_cli_command(cmd, return_as_json=True)
target_snapshot_id = json_output['id']
@ -110,36 +111,37 @@ def create_target_image(location, transient_resource_group_name, source_type, so
target_image_name += '-image'
target_image_name += '-' + location
cmd = prepare_cli_command(['image', 'create', \
'--resource-group', target_resource_group_name, \
'--name', target_image_name, \
'--location', location, \
'--source', target_blob_path, \
'--os-type', source_os_type, \
'--source', target_snapshot_id])
cmd = prepare_cli_command(['image', 'create',
'--resource-group', target_resource_group_name,
'--name', target_image_name,
'--location', location,
'--source', target_blob_path,
'--os-type', source_os_type,
'--source', target_snapshot_id])
run_cli_command(cmd)
def wait_for_blob_copy_operation(blob_name, target_container_name, target_storage_account_name, azure_pool_frequency, location):
def wait_for_blob_copy_operation(blob_name, target_container_name, target_storage_account_name,
azure_pool_frequency, location):
progress_controller = APPLICATION.get_progress_controller()
copy_status = "pending"
prev_progress = -1
while copy_status == "pending":
cmd = prepare_cli_command(['storage', 'blob', 'show', \
'--name', blob_name, \
'--container-name', target_container_name, \
'--account-name', target_storage_account_name])
cmd = prepare_cli_command(['storage', 'blob', 'show',
'--name', blob_name,
'--container-name', target_container_name,
'--account-name', target_storage_account_name])
json_output = run_cli_command(cmd, return_as_json=True)
copy_status = json_output["properties"]["copy"]["status"]
copy_progress_1, copy_progress_2 = json_output["properties"]["copy"]["progress"].split("/")
current_progress = round(int(copy_progress_1)/int(copy_progress_2), 1)
current_progress = round(int(copy_progress_1) / int(copy_progress_2), 1)
if current_progress != prev_progress:
msg = "{0} - copy progress: {1}%"\
.format(location, str(current_progress))\
.ljust(PROGRESS_LINE_LENGTH) #need to justify since messages overide each other
.ljust(PROGRESS_LINE_LENGTH) # need to justify since messages overide each other
progress_controller.add(message=msg)
prev_progress = current_progress
@ -150,7 +152,6 @@ def wait_for_blob_copy_operation(blob_name, target_container_name, target_storag
progress_controller.stop()
return
if copy_status == 'success':
progress_controller.stop()
else:

Просмотреть файл

@ -1,3 +1,8 @@
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
from multiprocessing import Pool
from azext_imagecopy.cli_utils import run_cli_command, prepare_cli_command
@ -7,14 +12,15 @@ import azure.cli.core.azlogging as azlogging
logger = azlogging.get_az_logger(__name__)
def imagecopy(source_resource_group_name, source_object_name, target_location, \
target_resource_group_name, source_type='image', cleanup='false', parallel_degree=-1):
# pylint: disable=too-many-statements
def imagecopy(source_resource_group_name, source_object_name, target_location,
target_resource_group_name, source_type='image', cleanup='false', parallel_degree=-1):
# get the os disk id from source vm/image
logger.warn("Getting os disk id of the source vm/image")
cmd = prepare_cli_command([source_type, 'show', \
'--name', source_object_name, \
'--resource-group', source_resource_group_name])
cmd = prepare_cli_command([source_type, 'show',
'--name', source_object_name,
'--resource-group', source_resource_group_name])
json_cmd_output = run_cli_command(cmd, return_as_json=True)
@ -22,30 +28,27 @@ def imagecopy(source_resource_group_name, source_object_name, target_location, \
source_os_type = json_cmd_output['storageProfile']['osDisk']['osType']
logger.debug("source_os_disk_id: %s. source_os_type: %s", source_os_disk_id, source_os_type)
# create source snapshots
logger.warn("Creating source snapshot")
source_os_disk_snapshot_name = source_object_name + '_os_disk_snapshot'
cmd = prepare_cli_command(['snapshot', 'create', \
'--name', source_os_disk_snapshot_name, \
'--resource-group', source_resource_group_name, \
'--source', source_os_disk_id])
cmd = prepare_cli_command(['snapshot', 'create',
'--name', source_os_disk_snapshot_name,
'--resource-group', source_resource_group_name,
'--source', source_os_disk_id])
run_cli_command(cmd)
# Get SAS URL for the snapshotName
logger.warn("Getting sas url for the source snapshot")
cmd = prepare_cli_command(['snapshot', 'grant-access', \
'--name', source_os_disk_snapshot_name, \
'--resource-group', source_resource_group_name, \
'--duration-in-seconds', '3600'])
cmd = prepare_cli_command(['snapshot', 'grant-access',
'--name', source_os_disk_snapshot_name,
'--resource-group', source_resource_group_name,
'--duration-in-seconds', '3600'])
json_output = run_cli_command(cmd, return_as_json=True)
source_os_disk_snapshot_url = json_output['accessSas']
logger.debug("source os disk snapshot url: %s" , source_os_disk_snapshot_url)
logger.debug("source os disk snapshot url: %s", source_os_disk_snapshot_url)
# Start processing in the target locations
@ -66,15 +69,15 @@ def imagecopy(source_resource_group_name, source_object_name, target_location, \
azure_pool_frequency = 5
if target_locations_count >= 5:
azure_pool_frequency = 15
elif target_locations_count >= 3:
elif target_locations_count >= 3:
azure_pool_frequency = 10
tasks = []
for location in target_location:
location = location.strip()
tasks.append((location, transient_resource_group_name, source_type, \
source_object_name, source_os_disk_snapshot_name, source_os_disk_snapshot_url, \
source_os_type, target_resource_group_name, azure_pool_frequency))
tasks.append((location, transient_resource_group_name, source_type,
source_object_name, source_os_disk_snapshot_name, source_os_disk_snapshot_url,
source_os_type, target_resource_group_name, azure_pool_frequency))
logger.warn("Starting async process for all locations")
@ -87,45 +90,45 @@ def imagecopy(source_resource_group_name, source_object_name, target_location, \
except KeyboardInterrupt:
logger.warn('User cancelled the operation')
if cleanup:
logger.warn('To cleanup temporary resources look for ones tagged with "image-copy-extension". \nYou can use the following command: az resource list --tag created_by=image-copy-extension')
logger.warn('To cleanup temporary resources look for ones tagged with "image-copy-extension". \n'
'You can use the following command: az resource list --tag created_by=image-copy-extension')
pool.terminate()
return
# Cleanup
if cleanup:
logger.warn('Deleting transient resources')
# Delete resource group
cmd = prepare_cli_command(['group', 'delete', '--no-wait', '--yes', \
'--name', transient_resource_group_name])
cmd = prepare_cli_command(['group', 'delete', '--no-wait', '--yes',
'--name', transient_resource_group_name])
run_cli_command(cmd)
# Revoke sas for source snapshot
cmd = prepare_cli_command(['snapshot', 'revoke-access', \
'--name', source_os_disk_snapshot_name, \
'--resource-group', source_resource_group_name])
cmd = prepare_cli_command(['snapshot', 'revoke-access',
'--name', source_os_disk_snapshot_name,
'--resource-group', source_resource_group_name])
run_cli_command(cmd)
# Delete source snapshot
cmd = prepare_cli_command(['snapshot', 'delete', \
'--name', source_os_disk_snapshot_name, \
'--resource-group', source_resource_group_name])
cmd = prepare_cli_command(['snapshot', 'delete',
'--name', source_os_disk_snapshot_name,
'--resource-group', source_resource_group_name])
run_cli_command(cmd)
def create_resource_group(resource_group_name, location):
# check if target resource group exists
cmd = prepare_cli_command(['group', 'exists', \
'--name', resource_group_name], output_as_json=False)
cmd = prepare_cli_command(['group', 'exists',
'--name', resource_group_name], output_as_json=False)
cmd_output = run_cli_command(cmd)
if 'false' in cmd_output:
# create the target resource group
logger.warn("Creating resource group: %s", resource_group_name)
cmd = prepare_cli_command(['group', 'create', \
'--name', resource_group_name, \
'--location', location])
cmd = prepare_cli_command(['group', 'create',
'--name', resource_group_name,
'--location', location])
run_cli_command(cmd)