Fix circular dependency in task_factory

Fred Park 2017-11-10 08:24:50 -08:00
Parent 5afa97de45
Commit 108d0c626e
2 changed files with 14 additions and 13 deletions
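
Background: the cycle ran through task_factory's import of settings, which in this file was used only to turn the configured remote path into a blob container or file share name. The commit resolves that name on the settings side instead and carries it in a new container field of TaskFactoryStorageSettings, so task_factory consumes plain data and needs no settings import. A minimal sketch of the pattern, using simplified fields and hypothetical function names (not the real module layout):

    import collections

    # trimmed-down stand-in for the real namedtuple; only the fields touched
    # by this commit are shown
    TaskFactoryStorageSettings = collections.namedtuple(
        'TaskFactoryStorageSettings',
        ['container', 'remote_path', 'is_file_share'])

    def build_task_factory_storage(container, remote_path, is_file_share=False):
        # settings side: everything task_factory needs is resolved up front
        # and handed over as plain data
        return TaskFactoryStorageSettings(
            container=container, remote_path=remote_path,
            is_file_share=is_file_share)

    def expand_file_task_factory(tfstorage):
        # task_factory side: no "from . import settings" required anymore
        return 'would list entities in {}'.format(tfstorage.container)

    print(expand_file_task_factory(
        build_task_factory_storage('mycontainer', 'mycontainer/inputs')))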

View file

@@ -272,7 +272,7 @@ UserIdentitySettings = collections.namedtuple(
 )
 TaskFactoryStorageSettings = collections.namedtuple(
     'TaskFactoryStorageSettings', [
-        'storage_settings', 'storage_link_name', 'remote_path',
+        'storage_settings', 'storage_link_name', 'container', 'remote_path',
         'is_file_share', 'include', 'exclude',
     ]
 )
@@ -2421,11 +2421,13 @@ def job_tasks(config, conf):
         # get storage settings if applicable
         if 'file' in _task['task_factory']:
             az = _task['task_factory']['file']['azure_storage']
+            drp = data_remote_path(az)
             tfstorage = TaskFactoryStorageSettings(
                 storage_settings=credentials_storage(
                     config, data_storage_account_settings(az)),
                 storage_link_name=az['storage_account_settings'],
-                remote_path=data_remote_path(az),
+                container=data_container_from_remote_path(None, drp),
+                remote_path=drp,
                 is_file_share=data_is_file_share(az),
                 include=_kv_read_checked(az, 'include'),
                 exclude=_kv_read_checked(az, 'exclude'),
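
On the settings side, the container is now derived once from the remote path via data_container_from_remote_path and stored alongside remote_path. The helper itself is not shown in this diff; a hypothetical reconstruction, assuming remote paths take the container-first form container/virtual/dir/file:

    def data_container_from_remote_path(conf, rp=None):
        # hypothetical sketch: the container (or file share) is assumed to be
        # the leading component of the remote path
        if rp is None:
            raise ValueError('remote path is required')
        return rp.split('/')[0]

    assert data_container_from_remote_path(
        None, rp='mycontainer/inputs/run1/a.bin') == 'mycontainer'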

View file

@@ -46,7 +46,6 @@ except ImportError: # pramga: no cover
 import azure.storage.blob as azureblob
 import azure.storage.file as azurefile
 # local imports
-from . import settings
 # global defines
 _DEFAULT_SAS_EXPIRY_DAYS = 365 * 30
@@ -203,16 +202,16 @@ def _get_storage_entities(task_factory, storage_settings):
             account_name=storage_settings.storage_settings.account,
             account_key=storage_settings.storage_settings.account_key,
             endpoint_suffix=storage_settings.storage_settings.endpoint)
-        container = settings.data_container_from_remote_path(
-            None, rp=storage_settings.remote_path)
         # list blobs in container with include/exclude
-        blobs = blob_client.list_blobs(container_name=container)
+        blobs = blob_client.list_blobs(
+            container_name=storage_settings.container)
         for blob in blobs:
             if not _inclusion_check(
                     blob.name, storage_settings.include,
                     storage_settings.exclude):
                 continue
-            file_path_with_container = '{}/{}'.format(container, blob.name)
+            file_path_with_container = '{}/{}'.format(
+                storage_settings.container, blob.name)
             file_name = blob.name.split('/')[-1]
             file_name_no_extension = file_name.split('.')[0]
             if task_factory['file']['task_filepath'] == 'file_path':
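
In the blob branch, the container no longer has to be recomputed; the factory lists the precomputed container and applies the include/exclude filters. A hedged sketch of that enumeration against the legacy azure-storage SDK imported here as azureblob (the fnmatch-based filtering is an assumption about what _inclusion_check does):

    import fnmatch

    import azure.storage.blob as azureblob

    def list_filtered_blobs(storage_settings):
        blob_client = azureblob.BlockBlobService(
            account_name=storage_settings.storage_settings.account,
            account_key=storage_settings.storage_settings.account_key,
            endpoint_suffix=storage_settings.storage_settings.endpoint)
        for blob in blob_client.list_blobs(
                container_name=storage_settings.container):
            # assumed filter semantics: any exclude match drops the blob,
            # otherwise it must match an include pattern (if any were given)
            if storage_settings.exclude and any(
                    fnmatch.fnmatch(blob.name, pat)
                    for pat in storage_settings.exclude):
                continue
            if storage_settings.include and not any(
                    fnmatch.fnmatch(blob.name, pat)
                    for pat in storage_settings.include):
                continue
            yield '{}/{}'.format(storage_settings.container, blob.name)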
@@ -233,11 +232,11 @@ def _get_storage_entities(task_factory, storage_settings):
             url = 'https://{}.blob.{}/{}/{}'.format(
                 storage_settings.storage_settings.account,
                 storage_settings.storage_settings.endpoint,
-                container,
+                storage_settings.container,
                 urlquote(blob.name))
             # create blob sas
             sas = blob_client.generate_blob_shared_access_signature(
-                container, blob.name,
+                storage_settings.container, blob.name,
                 permission=azureblob.BlobPermissions.READ,
                 expiry=datetime.datetime.utcnow() +
                 datetime.timedelta(days=_DEFAULT_SAS_EXPIRY_DAYS))
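
Each matched blob is then exposed through a long-lived read-only SAS built against the same container field. A sketch of that step with the legacy SDK; make_blob_url is used here as a convenience that should be equivalent to the manual URL formatting above (an assumption, not part of the commit):

    import datetime

    import azure.storage.blob as azureblob

    _DEFAULT_SAS_EXPIRY_DAYS = 365 * 30

    def blob_sas_url(blob_client, storage_settings, blob_name):
        # read-only SAS valid for the default expiry window
        sas = blob_client.generate_blob_shared_access_signature(
            storage_settings.container, blob_name,
            permission=azureblob.BlobPermissions.READ,
            expiry=datetime.datetime.utcnow() +
            datetime.timedelta(days=_DEFAULT_SAS_EXPIRY_DAYS))
        # build the https URL for the blob with the SAS token appended
        return blob_client.make_blob_url(
            storage_settings.container, blob_name, sas_token=sas)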
@@ -257,15 +256,15 @@ def _get_storage_entities(task_factory, storage_settings):
             account_name=storage_settings.storage_settings.account,
             account_key=storage_settings.storage_settings.account_key,
             endpoint_suffix=storage_settings.storage_settings.endpoint)
-        file_share = settings.data_container_from_remote_path(
-            None, rp=storage_settings.remote_path)
         # list files in share with include/exclude
-        for file in _list_all_files_in_fileshare(file_client, file_share):
+        for file in _list_all_files_in_fileshare(
+                file_client, storage_settings.container):
             if not _inclusion_check(
                     file, storage_settings.include,
                     storage_settings.exclude):
                 continue
-            file_path_with_container = '{}/{}'.format(file_share, file)
+            file_path_with_container = '{}/{}'.format(
+                storage_settings.container, file)
             file_name = file.split('/')[-1]
             file_name_no_extension = file_name.split('.')[0]
             if task_factory['file']['task_filepath'] == 'file_path':
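
The file-share branch mirrors the blob branch but walks the share instead; _list_all_files_in_fileshare is presumably an iterative traversal over list_directories_and_files. A hedged reconstruction of such a walk against the legacy azure-storage file API imported here as azurefile (the helper's actual implementation is not shown in this diff):

    import azure.storage.file as azurefile

    def _list_all_files_in_fileshare(file_client, file_share):
        # walk the share iteratively, yielding file paths relative to it
        dirs = [None]
        while dirs:
            directory = dirs.pop()
            for entry in file_client.list_directories_and_files(
                    share_name=file_share, directory_name=directory):
                path = (entry.name if directory is None
                        else '{}/{}'.format(directory, entry.name))
                if isinstance(entry, azurefile.models.File):
                    yield path
                else:
                    dirs.append(path)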