[Storage] Service Team Feedback Fixes Part 1 (#804)

* Storage Team code review.

* Code review fixes.

* Post review meeting fixes.

* Code review fixes.

* Code review fixes.

* Code review fixes.

* Always count on Pylint to ruin your perfect commit.
Travis Prescott 2016-09-07 16:25:55 -07:00 committed by GitHub
Parent 5cec7c917d
Commit 04db046634
8 changed files: 4794 additions and 58 deletions

View file

@@ -66,9 +66,6 @@
<SubType>Code</SubType>
</Compile>
<Compile Include="command_modules\azure-cli-feedback\azure\cli\command_modules\feedback\__init__.py" />
<Compile Include="command_modules\azure-cli-feedback\azure\cli\command_modules\__init__.py" />
<Compile Include="command_modules\azure-cli-feedback\azure\cli\__init__.py" />
<Compile Include="command_modules\azure-cli-feedback\azure\__init__.py" />
<Compile Include="command_modules\azure-cli-feedback\setup.py" />
<Compile Include="azure\cli\utils\vcr_test_base.py" />
<Compile Include="command_modules\azure-cli-iot\azure\cli\command_modules\iot\custom.py" />
@@ -134,9 +131,6 @@
<Compile Include="command_modules\azure-cli-iot\azure\cli\command_modules\iot\_help.py" />
<Compile Include="command_modules\azure-cli-iot\azure\cli\command_modules\iot\_params.py" />
<Compile Include="command_modules\azure-cli-iot\azure\cli\command_modules\iot\__init__.py" />
<Compile Include="command_modules\azure-cli-iot\azure\cli\command_modules\__init__.py" />
<Compile Include="command_modules\azure-cli-iot\azure\cli\__init__.py" />
<Compile Include="command_modules\azure-cli-iot\azure\__init__.py" />
<Compile Include="command_modules\azure-cli-iot\setup.py" />
<Compile Include="command_modules\azure-cli-keyvault\azure\cli\command_modules\keyvault\custom.py" />
<Compile Include="command_modules\azure-cli-keyvault\azure\cli\command_modules\keyvault\generated.py" />
@@ -400,9 +394,6 @@
</Compile>
<Compile Include="command_modules\azure-cli-redis\azure\cli\command_modules\redis\_params.py" />
<Compile Include="command_modules\azure-cli-redis\azure\cli\command_modules\redis\__init__.py" />
<Compile Include="command_modules\azure-cli-redis\azure\cli\command_modules\__init__.py" />
<Compile Include="command_modules\azure-cli-redis\azure\cli\__init__.py" />
<Compile Include="command_modules\azure-cli-redis\azure\__init__.py" />
<Compile Include="command_modules\azure-cli-redis\setup.py" />
<Compile Include="command_modules\azure-cli-resource\azure\cli\command_modules\resource\_factory.py">
<SubType>Code</SubType>

View file

@@ -4,7 +4,6 @@
#---------------------------------------------------------------------------------------------
from azure.cli.commands import create_command, command_table
from azure.cli.command_modules.storage._validators import validate_client_parameters
def cli_storage_data_plane_command(name, operation, client_factory,
@@ -19,17 +18,20 @@ def cli_storage_data_plane_command(name, operation, client_factory,
command.add_argument('account_name', '--account-name', required=False, default=None,
arg_group=group_name,
help='Storage account name. Must be used in conjunction with either '
'storage account key or a SAS token. Var: AZURE_STORAGE_ACCOUNT')
'storage account key or a SAS token. Environment variable: '
'AZURE_STORAGE_ACCOUNT')
command.add_argument('account_key', '--account-key', required=False, default=None,
arg_group=group_name,
help='Storage account key. Must be used in conjunction with storage '
'account name. Var: AZURE_STORAGE_KEY')
'account name. Environment variable: '
'AZURE_STORAGE_KEY')
command.add_argument('connection_string', '--connection-string', required=False, default=None,
validator=validate_client_parameters, arg_group=group_name,
help='Storage account connection string. Var: '
help='Storage account connection string. Environment variable: '
'AZURE_STORAGE_CONNECTION_STRING')
command.add_argument('sas_token', '--sas-token', required=False, default=None,
arg_group=group_name,
help='A Shared Access Signature (SAS). Must be used in conjunction with '
'storage account name. Var: AZURE_SAS_TOKEN')
'storage account name. Environment variable: '
'AZURE_STORAGE_SAS_TOKEN')
command_table[command.name] = command
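
For reference, a minimal sketch of the environment-variable route these help strings describe; the variable names come from the help text above, and all values are placeholders:

import os

# Either account name + key ...
os.environ['AZURE_STORAGE_ACCOUNT'] = 'mystorageaccount'
os.environ['AZURE_STORAGE_KEY'] = '<base64-account-key>'

# ... or a connection string / SAS token instead (placeholders, not real credentials):
# os.environ['AZURE_STORAGE_CONNECTION_STRING'] = 'DefaultEndpointsProtocol=https;AccountName=mystorageaccount;AccountKey=<key>'
# os.environ['AZURE_STORAGE_SAS_TOKEN'] = '<sas-token>'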

View file

@@ -4,10 +4,9 @@
#---------------------------------------------------------------------------------------------
# pylint: disable=line-too-long
import os
from six import u as unicode_string
from azure.cli._config import az_config
from azure.cli.commands.parameters import \
(ignore_type, tags_type, get_resource_name_completion_list, get_enum_type_completion_list)
from azure.cli.commands import register_cli_argument, register_extra_cli_argument, CliArgumentType
@@ -22,7 +21,7 @@ from azure.storage.blob.baseblobservice import BaseBlobService
from azure.storage.blob.models import ContentSettings as BlobContentSettings, ContainerPermissions, BlobPermissions
from azure.storage.file import FileService
from azure.storage.file.models import ContentSettings as FileContentSettings, SharePermissions, FilePermissions
from azure.storage.table import TableService
from azure.storage.table import TableService, TablePayloadFormat
from azure.storage.queue import QueueService
from azure.storage.queue.models import QueuePermissions
@@ -30,18 +29,18 @@ from ._validators import \
(datetime_type, datetime_string_type, get_file_path_validator, validate_metadata,
get_permission_validator, table_permission_validator, get_permission_help_string,
resource_type_type, services_type, ipv4_range_type, validate_entity,
validate_select,
get_content_setting_validator, validate_encryption,
validate_select, validate_source_uri,
get_content_setting_validator, validate_encryption, validate_accept,
process_file_download_namespace, process_logging_update_namespace,
process_metric_update_namespace)
# COMPLETERS
def _get_client(service, parsed_args):
account_name = parsed_args.account_name or os.getenv('AZURE_STORAGE_ACCOUNT')
account_key = parsed_args.account_key or os.getenv('AZURE_STORAGE_KEY')
connection_string = parsed_args.connection_string or os.getenv('AZURE_STORAGE_CONNECTION_STRING')
sas_token = parsed_args.sas_token or os.getenv('AZURE_SAS_TOKEN')
account_name = parsed_args.account_name or az_config.get('storage', 'account', None)
account_key = parsed_args.account_key or az_config.get('storage', 'key', None)
connection_string = parsed_args.connection_string or az_config.get('storage', 'connection_string', None)
sas_token = parsed_args.sas_token or az_config.get('storage', 'sas_token', None)
return get_data_service_client(service, account_name, account_key, connection_string, sas_token)
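
A minimal sketch of the fallback _get_client now relies on, assuming az_config is backed by an INI-style configuration file with a [storage] section (the option names mirror the az_config.get calls above; the file contents are illustrative only):

import configparser

sample_config = """\
[storage]
account = mystorageaccount
key = <base64-account-key>
connection_string = DefaultEndpointsProtocol=https;AccountName=mystorageaccount;AccountKey=<key>
sas_token = <sas-token>
"""

cfg = configparser.ConfigParser()
cfg.read_string(sample_config)

# Mirrors az_config.get('storage', 'account', None) when no --account-name is passed.
print(cfg.get('storage', 'account', fallback=None))  # mystorageaccount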
def get_storage_name_completion_list(service, func, parent=None):
@@ -110,10 +109,10 @@ def register_path_argument(scope, default_file_param=None, options_list=None):
register_cli_argument(scope, 'file_name', ignore_type)
register_cli_argument(scope, 'directory_name', ignore_type)
# CONTENT SETTINGS REGISTRATION
# EXTRA PARAMETER SET REGISTRATION
def register_content_settings_argument(scope, settings_class):
register_cli_argument(scope, 'content_settings', ignore_type, validator=get_content_setting_validator(settings_class))
def register_content_settings_argument(scope, settings_class, update):
register_cli_argument(scope, 'content_settings', ignore_type, validator=get_content_setting_validator(settings_class, update))
register_extra_cli_argument(scope, 'content_type', default=None, help='The content MIME type.')
register_extra_cli_argument(scope, 'content_encoding', default=None, help='The content encoding type.')
register_extra_cli_argument(scope, 'content_language', default=None, help='The content language.')
@@ -121,12 +120,23 @@ def register_content_settings_argument(scope, settings_class):
register_extra_cli_argument(scope, 'content_cache_control', default=None, help='The cache control string.')
register_extra_cli_argument(scope, 'content_md5', default=None, help='The content\'s MD5 hash.')
def register_source_uri_arguments(scope):
register_cli_argument(scope, 'copy_source', options_list=('--source-uri', '-u'), validator=validate_source_uri, required=False)
register_extra_cli_argument(scope, 'source_sas', default=None, help='The shared access signature for the source storage account.')
register_extra_cli_argument(scope, 'source_share', default=None, help='The share name for the source storage account.')
register_extra_cli_argument(scope, 'source_path', default=None, help='The file path for the source storage account.')
register_extra_cli_argument(scope, 'source_container', default=None, help='The container name for the source storage account.')
register_extra_cli_argument(scope, 'source_blob', default=None, help='The blob name for the source storage account.')
register_extra_cli_argument(scope, 'source_snapshot', default=None, help='The blob snapshot for the source storage account.')
# CUSTOM CHOICE LISTS
blob_types = {'block': BlockBlobService, 'page': PageBlobService, 'append': AppendBlobService}
public_access_types = {'blob': PublicAccess.Blob, 'container': PublicAccess.Container}
delete_snapshot_types = {'include': DeleteSnapshot.Include, 'only': DeleteSnapshot.Only}
storage_account_key_options = {'both': ['key1', 'key2'], 'primary': ['key1'], 'secondary': ['key2']}
table_payload_formats = {'none': TablePayloadFormat.JSON_NO_METADATA, 'minimal': TablePayloadFormat.JSON_MINIMAL_METADATA, 'full': TablePayloadFormat.JSON_FULL_METADATA}
# ARGUMENT TYPES
@@ -144,9 +154,11 @@ queue_name_type = CliArgumentType(options_list=('--queue-name', '-q'), help='The
register_cli_argument('storage', 'directory_name', directory_type)
register_cli_argument('storage', 'share_name', share_name_type)
register_cli_argument('storage', 'table_name', table_name_type)
register_cli_argument('storage', 'retry_wait', options_list=('--retry-interval',))
register_cli_argument('storage', 'progress_callback', ignore_type)
register_cli_argument('storage', 'if_modified_since', help='Alter only if modified since supplied UTC datetime (Y-m-d\'T\'H:M\'Z\')', type=datetime_type)
register_cli_argument('storage', 'if_unmodified_since', help='Alter only if unmodified since supplied UTC datetime (Y-m-d\'T\'H:M\'Z\')', type=datetime_type)
register_cli_argument('storage', 'metadata', nargs='+', help='Metadata in space-separated key=value pairs.', validator=validate_metadata)
register_cli_argument('storage', 'metadata', nargs='+', help='Metadata in space-separated key=value pairs. This overwrites any existing metadata.', validator=validate_metadata)
register_cli_argument('storage', 'timeout', help='Request timeout in seconds. Applies to each call to the service.', type=int)
register_cli_argument('storage', 'container_name', container_name_type)
@@ -157,15 +169,18 @@ register_cli_argument('storage account connection-string', 'account_name', accou
register_cli_argument('storage account connection-string', 'protocol', help='The default endpoint protocol.', choices=['http', 'https'], type=str.lower)
register_cli_argument('storage account create', 'account_name', account_name_type, options_list=('--name', '-n'), completer=None)
register_cli_argument('storage account create', 'kind', help='Indicates the type of storage account. (Storage, BlobStorage)', completer=get_enum_type_completion_list(Kind))
register_cli_argument('storage account create', 'tags', tags_type)
for item in ['create', 'update']:
register_cli_argument('storage account {}'.format(item), 'sku', help='The storage account SKU. (Standard_LRS, Standard_GRS, Standard_RAGRS, Standard_ZRS, Premium_LRS)', completer=get_enum_type_completion_list(SkuName))
register_cli_argument('storage account {}'.format(item), 'access_tier', help='Required for StandardBlob accounts. The access tier used for billing. Cannot be set for StandardLRS, StandardGRS, StandardRAGRS, or PremiumLRS account types. (Hot, Cool)', completer=get_enum_type_completion_list(AccessTier))
register_cli_argument('storage account {}'.format(item), 'custom_domain', help='User domain assigned to the storage account. Name is the CNAME source. Use empty string to clear.')
register_cli_argument('storage account {}'.format(item), 'encryption', nargs='+', help='Specifies which service(s) to encrypt.', choices=list(EncryptionServices._attribute_map.keys()), validator=validate_encryption) # pylint: disable=protected-access
register_cli_argument('storage account create', 'custom_domain', help='User domain assigned to the storage account. Name is the CNAME source.')
register_cli_argument('storage account update', 'custom_domain', help='User domain assigned to the storage account. Name is the CNAME source. Use "" to clear existing value.')
register_cli_argument('storage account update', 'tags', tags_type, default=None)
register_cli_argument('storage account keys renew', 'key', help='The key(s) to renew.', choices=list(storage_account_key_options.keys()), type=str.lower)
@@ -191,14 +206,15 @@ for item in ['download', 'upload']:
register_cli_argument('storage blob {}'.format(item), 'validate_content', action='store_true')
for item in ['update', 'upload']:
register_content_settings_argument('storage blob {}'.format(item), BlobContentSettings)
register_content_settings_argument('storage blob {}'.format(item), BlobContentSettings, item == 'update')
register_cli_argument('storage blob upload', 'blob_type', options_list=('--type', '-t'), choices=list(blob_types.keys()), type=str.lower)
register_cli_argument('storage blob url', 'protocol', choices=['http', 'https'], type=str.lower)
register_cli_argument('storage blob upload', 'maxsize_condition', help='The max length in bytes permitted for an append blob.')
register_cli_argument('storage blob upload', 'validate_content', help='Specifies that an MD5 hash shall be calculated for each chunk of the blob and verified by the service when the chunk has arrived.')
for item in ['file', 'blob']:
register_cli_argument('storage {} copy'.format(item), 'copy_source', options_list=('--source-uri', '-u'))
register_cli_argument('storage {} url'.format(item), 'protocol', help='Protocol to use.', choices=['http', 'https'], default='https', type=str.lower)
register_source_uri_arguments('storage {} copy start'.format(item))
register_cli_argument('storage container', 'container_name', container_name_type, options_list=('--name', '-n'))
@@ -209,6 +225,8 @@ register_cli_argument('storage container create', 'public_access', choices=list(
register_cli_argument('storage container delete', 'fail_not_exist', help='Throw an exception if the container does not exist.')
register_cli_argument('storage container exists', 'blob_name', ignore_type)
register_cli_argument('storage container exists', 'blob_name', ignore_type)
register_cli_argument('storage container exists', 'snapshot', ignore_type)
register_cli_argument('storage container policy', 'container_name', container_name_type)
register_cli_argument('storage container policy', 'policy_name', options_list=('--name', '-n'), help='The stored access policy name.', completer=get_storage_acl_name_completion_list(BaseBlobService, 'container_name', 'get_container_acl'))
@@ -250,12 +268,13 @@ register_cli_argument('storage file list', 'directory_name', options_list=('--pa
register_path_argument('storage file metadata show')
register_path_argument('storage file metadata update')
register_cli_argument('storage file resize', 'content_length', options_list=('--size',))
register_path_argument('storage file resize')
register_path_argument('storage file show')
for item in ['update', 'upload']:
register_content_settings_argument('storage file {}'.format(item), FileContentSettings)
register_content_settings_argument('storage file {}'.format(item), FileContentSettings, item == 'update')
register_path_argument('storage file update')
@@ -281,10 +300,12 @@ register_cli_argument('storage table policy', 'policy_name', options_list=('--na
register_cli_argument('storage entity', 'entity', options_list=('--entity', '-e'), validator=validate_entity, nargs='+')
register_cli_argument('storage entity', 'property_resolver', ignore_type)
register_cli_argument('storage entity', 'select', nargs='+', validator=validate_select)
register_cli_argument('storage entity', 'select', nargs='+', help='Space separated list of properties to return for each entity.', validator=validate_select)
register_cli_argument('storage entity insert', 'if_exists', choices=['fail', 'merge', 'replace'])
register_cli_argument('storage entity query', 'accept', help='Specifies how much metadata to include in the response payload.', choices=table_payload_formats.keys(), default='minimal', validator=validate_accept)
register_cli_argument('storage queue', 'queue_name', queue_name_type, options_list=('--name', '-n'))
register_cli_argument('storage queue create', 'queue_name', queue_name_type, options_list=('--name', '-n'), completer=None)
@@ -294,7 +315,7 @@ register_cli_argument('storage queue policy', 'policy_name', options_list=('--na
register_cli_argument('storage message', 'queue_name', queue_name_type)
register_cli_argument('storage message', 'message_id', options_list=('--id',))
register_cli_argument('storage message', 'content', type=unicode_string)
register_cli_argument('storage message', 'content', type=unicode_string, help='Message content, up to 64KB in size.')
for item in ['account', 'blob', 'container', 'file', 'share', 'table', 'queue']:
register_cli_argument('storage {} generate-sas'.format(item), 'ip', help='Specifies the IP address or range of IP addresses from which to accept requests. Supports only IPv4 style addresses.', type=ipv4_range_type)
@@ -321,6 +342,7 @@ register_cli_argument('storage account generate-sas', 'services', help='The stor
register_cli_argument('storage account generate-sas', 'resource_types', help='The resource types the SAS is applicable for. Allowed values: (s)ervice (c)ontainer (o)bject. Can be combined.', type=resource_type_type)
register_cli_argument('storage account generate-sas', 'expiry', help='Specifies the UTC datetime (Y-m-d\'T\'H:M\'Z\') at which the SAS becomes invalid.', type=datetime_string_type)
register_cli_argument('storage account generate-sas', 'start', help='Specifies the UTC datetime (Y-m-d\'T\'H:M\'Z\') at which the SAS becomes valid. Defaults to the time of the request.', type=datetime_string_type)
register_cli_argument('storage account generate-sas', 'account_name', account_name_type, options_list=('--account-name',), help='Storage account name. Must be used in conjunction with either storage account key or a SAS token. Var: AZURE_STORAGE_ACCOUNT')
register_cli_argument('storage logging show', 'services', help='The storage services from which to retrieve logging info: (b)lob (q)ueue (t)able. Can be combined.')
@@ -337,7 +359,7 @@ register_cli_argument('storage metrics update', 'minute', help='Update the by-mi
register_cli_argument('storage metrics update', 'api', help='Specify whether to include API in metrics. Applies to both hour and minute metrics if both are specified. Must be specified if hour or minute metrics are enabled and being updated.', choices=['enable', 'disable'])
register_cli_argument('storage metrics update', 'retention', type=int, help='Number of days for which to retain metrics. 0 to disable. Applies to both hour and minute metrics if both are specified.')
register_cli_argument('storage cors', 'max_age', type=int, help='The number of seconds the client/browser should cache a preflight response.')
register_cli_argument('storage cors', 'max_age', type=int, help='The number of seconds the client/browser should cache a preflight response.', default="0")
register_cli_argument('storage cors', 'origins', nargs='+', help='List of origin domains that will be allowed via CORS, or "*" to allow all domains.')
register_cli_argument('storage cors', 'methods', nargs='+', help='List of HTTP methods allowed to be executed by the origin.', choices=['DELETE', 'GET', 'HEAD', 'MERGE', 'POST', 'OPTIONS', 'PUT'], type=str.upper)
register_cli_argument('storage cors', 'allowed_headers', nargs='+', help='List of response headers allowed to be part of the cross-origin request.')
@@ -346,3 +368,5 @@ register_cli_argument('storage cors', 'exposed_headers', nargs='+', help='List o
register_cli_argument('storage cors add', 'services', help='The storage service(s) for which to add the CORS rule: (b)lob (f)ile (q)ueue (t)able. Can be combined.')
register_cli_argument('storage cors clear', 'services', help='The storage service(s) for which to clear CORS rules: (b)lob (f)ile (q)ueue (t)able. Can be combined.')
register_cli_argument('storage cors list', 'services', help='The storage service(s) for which to list the CORS rules: (b)lob (f)ile (q)ueue (t)able. Can be combined.')

View file

@@ -9,22 +9,37 @@ from datetime import datetime
import os
import re
from azure.cli.commands.client_factory import get_mgmt_service_client
from azure.cli._config import az_config
from azure.cli.commands.client_factory import get_mgmt_service_client, get_data_service_client
from azure.cli.commands.validators import validate_key_value_pairs
from azure.mgmt.storage import StorageManagementClient
from azure.storage.models import ResourceTypes, Services
from azure.storage.table import TablePermissions
from azure.storage.table import TablePermissions, TablePayloadFormat
from azure.storage.blob.baseblobservice import BaseBlobService
from azure.storage.blob.models import ContentSettings as BlobContentSettings
from azure.storage.file import FileService
from azure.storage.file.models import ContentSettings as FileContentSettings
# region PARAMETER VALIDATORS
def validate_accept(namespace):
if namespace.accept:
formats = {
'none': TablePayloadFormat.JSON_NO_METADATA,
'minimal': TablePayloadFormat.JSON_MINIMAL_METADATA,
'full': TablePayloadFormat.JSON_FULL_METADATA
}
namespace.accept = formats[namespace.accept.lower()]
def validate_client_parameters(namespace):
""" Retrieves storage connection parameters from environment variables and parses out
connection string into account name and key """
n = namespace
if not n.connection_string:
n.connection_string = os.environ.get('AZURE_STORAGE_CONNECTION_STRING')
n.connection_string = az_config.get('storage', 'connection_string', None)
# if connection string supplied or in environment variables, extract account key and name
if n.connection_string:
@@ -34,11 +49,11 @@ def validate_client_parameters(namespace):
# otherwise, simply try to retrieve the remaining variables from environment variables
if not n.account_name:
n.account_name = os.environ.get('AZURE_STORAGE_ACCOUNT')
n.account_name = az_config.get('storage', 'account', None)
if not n.account_key:
n.account_key = os.environ.get('AZURE_STORAGE_KEY')
n.account_key = az_config.get('storage', 'key', None)
if not n.sas_token:
n.sas_token = os.environ.get('AZURE_SAS_TOKEN')
n.sas_token = az_config.get('storage', 'sas_token', None)
# if account name is specified but no key, attempt to query
if n.account_name and not n.account_key:
@@ -52,22 +67,103 @@ def validate_client_parameters(namespace):
else:
raise ValueError("Storage account '{}' not found.".format(n.account_name))
def get_content_setting_validator(settings_class):
def validate_source_uri(namespace):
usage_string = 'invalid usage: supply only one of the following argument sets:' + \
'\n\t --source-uri' + \
'\n\tOR --source-container --source-blob [--source-snapshot] [--source-sas]' + \
'\n\tOR --source-share --source-path [--source-sas]'
ns = vars(namespace)
validate_client_parameters(namespace) # must run first to resolve storage account
storage_acc = ns.get('account_name', None) or az_config.get('storage', 'account', None)
uri = ns.get('copy_source', None)
container = ns.pop('source_container', None)
blob = ns.pop('source_blob', None)
sas = ns.pop('source_sas', None)
snapshot = ns.pop('source_snapshot', None)
share = ns.pop('source_share', None)
path = ns.pop('source_path', None)
if uri:
if any([container, blob, sas, snapshot, share, path]):
raise ValueError(usage_string)
else:
# simplest scenario--no further processing necessary
return
valid_blob_source = container and blob and not share and not path
valid_file_source = share and path and not container and not blob and not snapshot
if (not valid_blob_source and not valid_file_source) or (valid_blob_source and valid_file_source): # pylint: disable=line-too-long
raise ValueError(usage_string)
query_params = []
if sas:
query_params.append(sas)
if snapshot:
query_params.append(snapshot)
uri = 'https://{0}.{1}.core.windows.net/{2}/{3}{4}{5}'.format(
storage_acc,
'blob' if valid_blob_source else 'share',
container if valid_blob_source else share,
blob if valid_blob_source else path,
'?' if query_params else '',
'&'.join(query_params))
namespace.copy_source = uri
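
To make the blob branch above concrete: given --source-container and --source-blob plus a --source-sas, the validator assembles a URI like the following (account name and SAS are hypothetical placeholders):

# Illustration only -- the same format string as above, filled in for a blob source.
storage_acc = 'mystorageaccount'
container, blob = 'src-container', 'src-blob.txt'
query_params = ['sv=2015-04-05&sig=<signature>']  # would hold the SAS and/or snapshot, if supplied

uri = 'https://{0}.{1}.core.windows.net/{2}/{3}{4}{5}'.format(
    storage_acc, 'blob', container, blob,
    '?' if query_params else '', '&'.join(query_params))
print(uri)
# https://mystorageaccount.blob.core.windows.net/src-container/src-blob.txt?sv=2015-04-05&sig=<signature>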
def get_content_setting_validator(settings_class, update):
def _class_name(class_type):
return class_type.__module__ + "." + class_type.__class__.__name__
def validator(namespace):
namespace.content_settings = settings_class(
content_type=namespace.content_type,
content_disposition=namespace.content_disposition,
content_encoding=namespace.content_encoding,
content_language=namespace.content_language,
content_md5=namespace.content_md5,
cache_control=namespace.content_cache_control
# must run certain validators first for an update
if update:
validate_client_parameters(namespace)
if update and _class_name(settings_class) == _class_name(FileContentSettings):
get_file_path_validator()(namespace)
ns = vars(namespace)
# retrieve the existing object properties for an update
if update:
account = ns.get('account_name')
key = ns.get('account_key')
cs = ns.get('connection_string')
sas = ns.get('sas_token')
if _class_name(settings_class) == _class_name(BlobContentSettings):
client = get_data_service_client(BaseBlobService, account, key, cs, sas)
container = ns.get('container_name')
blob = ns.get('blob_name')
lease_id = ns.get('lease_id')
props = client.get_blob_properties(container, blob, lease_id=lease_id).properties.content_settings # pylint: disable=line-too-long
elif _class_name(settings_class) == _class_name(FileContentSettings):
client = get_data_service_client(FileService, account, key, cs, sas) # pylint: disable=redefined-variable-type
share = ns.get('share_name')
directory = ns.get('directory_name')
filename = ns.get('file_name')
props = client.get_file_properties(share, directory, filename).properties.content_settings # pylint: disable=line-too-long
# create new properties
new_props = settings_class(
content_type=ns.pop('content_type', None),
content_disposition=ns.pop('content_disposition', None),
content_encoding=ns.pop('content_encoding', None),
content_language=ns.pop('content_language', None),
content_md5=ns.pop('content_md5', None),
cache_control=ns.pop('content_cache_control', None)
)
del namespace.content_type,
del namespace.content_disposition,
del namespace.content_encoding,
del namespace.content_language,
del namespace.content_md5,
del namespace.content_cache_control
# if update, fill in any None values with existing
if update:
new_props.content_type = new_props.content_type or props.content_type
new_props.content_disposition = new_props.content_disposition \
or props.content_disposition
new_props.content_encoding = new_props.content_encoding or props.content_encoding
new_props.content_language = new_props.content_language or props.content_language
new_props.content_md5 = new_props.content_md5 or props.content_md5
new_props.cache_control = new_props.cache_control or props.cache_control
ns['content_settings'] = new_props
namespace = argparse.Namespace(**ns)
return validator
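
The update path above amounts to a 'keep whatever was not overridden' merge of content settings. A minimal sketch of that rule, using stand-in namespaces instead of the real ContentSettings classes:

from types import SimpleNamespace

def merge_content_settings(new, existing):
    # Settings left unset on the command line keep the stored value; explicit values win.
    for attr in ('content_type', 'content_disposition', 'content_encoding',
                 'content_language', 'content_md5', 'cache_control'):
        setattr(new, attr, getattr(new, attr) or getattr(existing, attr))
    return new

existing = SimpleNamespace(content_type='text/plain', content_disposition=None,
                           content_encoding='gzip', content_language=None,
                           content_md5=None, cache_control='no-cache')
requested = SimpleNamespace(content_type='application/json', content_disposition=None,
                            content_encoding=None, content_language=None,
                            content_md5=None, cache_control=None)
merged = merge_content_settings(requested, existing)
print(merged.content_type, merged.content_encoding)  # application/json gzip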
def validate_encryption(namespace):
@@ -121,6 +217,9 @@ def get_file_path_validator(default_file_param=None):
""" Creates a namespace validator that splits out 'path' into 'directory_name' and 'file_name'.
Allows another path-type parameter to be named which can supply a default filename. """
def validator(namespace):
if not hasattr(namespace, 'path'):
return
path = namespace.path
dir_name, file_name = os.path.split(path) if path else (None, '')

View file

@@ -102,7 +102,7 @@ def set_storage_account_properties(
return scf.storage_accounts.update(resource_group_name, account_name, params)
def upload_blob( # pylint: disable=too-many-locals
client, container_name, blob_name, blob_type, file_path,
client, container_name, blob_name, file_path, blob_type='block',
content_settings=None, metadata=None, validate_content=False, maxsize_condition=None,
max_connections=2, max_retries=5, retry_wait=1, lease_id=None, if_modified_since=None,
if_unmodified_since=None, if_match=None, if_none_match=None, timeout=None):

View file

@@ -365,7 +365,7 @@ class StorageFileScenarioTest(StorageAccountVCRTestBase):
raise CLIError('\nDownload failed. Test failed!')
# test resize command
s.cmd('storage file resize --share-name {} -p "{}" --content-length 1234'.format(share, filename))
s.cmd('storage file resize --share-name {} -p "{}" --size 1234'.format(share, filename))
s.cmd('storage file show --share-name {} -p "{}"'.format(share, filename),
checks=JMESPathCheck('properties.contentLength', 1234))

storage_group_help.txt (new file, 212 lines added)
View file

@@ -0,0 +1,212 @@
Group
az storage account
Subgroups:
keys
Commands:
check-name : Checks that account name is valid and is not in use.
connection-string: Show the connection string for a storage account.
create : Create a storage account.
delete : Deletes a storage account in Microsoft Azure.
generate-sas : Generates a shared access signature for the account.
list : List storage accounts.
show : Returns the properties for the specified storage account including but not
limited to name, account type, location, and account status.
show-usage : Show the current count and limit of the storage accounts under the
subscription.
update : Update storage account property (only one at a time).
-------------------------------------------
Group
az storage blob
Subgroups:
copy
lease
metadata
service-properties
Commands:
delete : Marks the specified blob or snapshot for deletion.
download : Downloads a blob to a file path, with automatic chunking and progress
notifications.
exists : Returns a boolean indicating whether the container exists (if blob_name is
None), or otherwise a boolean indicating whether the blob exists.
generate-sas : Generates a shared access signature for the blob.
list : Returns a generator to list the blobs under the specified container.
show : Returns all user-defined metadata, standard HTTP properties, and system
properties for the blob.
snapshot : Creates a read-only snapshot of a blob.
update : Sets system properties on the blob.
upload : Creates a new blob from a file path, or updates the content of an existing
blob, with automatic chunking and progress notifications.
url : Creates the url to access a blob.
-------------------------------------------
Group
az storage container
Subgroups:
lease
metadata
policy
Commands:
create : Creates a new container under the specified account.
delete : Marks the specified container for deletion.
exists : Returns a boolean indicating whether the container exists (if blob_name is None),
or otherwise a boolean indicating whether the blob exists.
generate-sas: Generates a shared access signature for the container.
list : Returns a generator to list the containers under the specified account.
show : Returns all user-defined metadata and system properties for the specified
container.
-------------------------------------------
Group
az storage cors
Commands:
add
clear
list
-------------------------------------------
Group
az storage directory
Subgroups:
metadata
Commands:
create : Creates a new directory under the specified share or parent directory.
delete : Deletes the specified empty directory.
exists : Returns a boolean indicating whether the share exists if only share name is given.
show : Returns all user-defined metadata and system properties for the specified directory.
-------------------------------------------
Group
az storage entity
Commands:
delete : Deletes an existing entity in a table.
insert : Insert a new entity into the table.
merge : Updates an existing entity by merging the entity's properties.
query : Returns a generator to list the entities in the table specified.
replace: Updates an existing entity in a table.
show : Get an entity from the specified table.
-------------------------------------------
Group
az storage file
Subgroups:
copy
metadata
Commands:
delete : Marks the specified file for deletion.
download : Downloads a file to a file path, with automatic chunking and progress
notifications.
exists : Returns a boolean indicating whether the share exists if only share name is given.
generate-sas: Generates a shared access signature for the file.
list : Returns a generator to list the directories and files under the specified share.
resize : Resizes a file to the specified size.
show : Returns all user-defined metadata, standard HTTP properties, and system properties
for the file.
update : Sets system properties on the file.
upload : Creates a new azure file from a local file path, or updates the content of an
existing file, with automatic chunking and progress notifications.
url : Creates the url to access a file.
-------------------------------------------
Group
az storage logging
Commands:
show
update
-------------------------------------------
Group
az storage metrics
Commands:
show
update
-------------------------------------------
Group
az storage message
Commands:
clear : Deletes all messages from the specified queue.
delete: Deletes the specified message.
get : Retrieves one or more messages from the front of the queue.
peek : Retrieves one or more messages from the front of the queue, but does not alter the
visibility of the message.
put : Adds a new message to the back of the message queue.
update: Updates the visibility timeout of a message.
-------------------------------------------
Group
az storage queue
Subgroups:
metadata
policy
Commands:
create : Creates a queue under the given account.
delete : Deletes the specified queue and any messages it contains.
exists : Returns a boolean indicating whether the queue exists.
generate-sas: Generates a shared access signature for the queue.
list : Returns a generator to list the queues.
stats : Retrieves statistics related to replication for the Queue service.
-------------------------------------------
Group
az storage share
Subgroups:
metadata
policy
Commands:
create : Creates a new share under the specified account.
delete : Marks the specified share for deletion.
exists : Returns a boolean indicating whether the share exists if only share name is given.
generate-sas: Generates a shared access signature for the share.
list : Returns a generator to list the shares under the specified account.
show : Returns all user-defined metadata and system properties for the specified share.
stats : Gets the approximate size of the data stored on the share, rounded up to the
nearest gigabyte.
update : Sets service-defined properties for the specified share.
-------------------------------------------
Group
az storage table
Subgroups:
batch
policy
Commands:
create : Creates a new table in the storage account.
delete : Deletes the specified table and any data it contains.
exists : Returns a boolean indicating whether the table exists.
generate-sas: Generates a shared access signature for the table.
list : Returns a generator to list the tables.
stats : Retrieves statistics related to replication for the Table service.

storage_help.txt (new file, 4408 lines added)

Diff not shown because of its large size.