Lili Deng 2023-04-25 11:50:31 +08:00 committed by LiliDeng
Parent 12a1362d0e
Commit 4295522326
6 changed files with 237 additions and 25 deletions

View file

@ -52,6 +52,7 @@ from azure.storage.blob import (
from azure.storage.fileshare import ShareServiceClient # type: ignore
from dataclasses_json import dataclass_json
from marshmallow import validate
from msrestazure.azure_cloud import Cloud # type: ignore
from PIL import Image, UnidentifiedImageError
from retry import retry
@ -551,6 +552,8 @@ def get_compute_client(
credential=platform.credential,
subscription_id=subscription_id,
api_version=api_version,
base_url=platform.cloud.endpoints.resource_manager,
credential_scopes=[platform.cloud.endpoints.resource_manager + "/.default"],
)
@ -620,6 +623,8 @@ def get_private_dns_management_client(
return PrivateDnsManagementClient(
credential=platform.credential,
subscription_id=platform.subscription_id,
base_url=platform.cloud.endpoints.resource_manager,
credential_scopes=[platform.cloud.endpoints.resource_manager + "/.default"],
)
@ -627,10 +632,13 @@ def create_update_private_zones(
platform: "AzurePlatform",
resource_group_name: str,
log: Logger,
private_zone_name: str = "privatelink.file.core.windows.net",
private_zone_name: str = "privatelink",
private_zone_location: str = "global",
) -> Any:
private_dns_client = get_private_dns_management_client(platform)
private_zone_name = ".".join(
[private_zone_name, "file", platform.cloud.suffixes.storage_endpoint]
)
private_zones = private_dns_client.private_zones.begin_create_or_update(
resource_group_name=resource_group_name,
private_zone_name=private_zone_name,
@ -645,9 +653,12 @@ def delete_private_zones(
platform: "AzurePlatform",
resource_group_name: str,
log: Logger,
private_zone_name: str = "privatelink.file.core.windows.net",
private_zone_name: str = "privatelink",
) -> None:
private_dns_client = get_private_dns_management_client(platform)
private_zone_name = ".".join(
[private_zone_name, "file", platform.cloud.suffixes.storage_endpoint]
)
try:
private_dns_client.private_zones.get(
resource_group_name=resource_group_name,
@ -679,11 +690,14 @@ def create_update_record_sets(
resource_group_name: str,
ipv4_address: str,
log: Logger,
private_zone_name: str = "privatelink.file.core.windows.net",
private_zone_name: str = "privatelink",
relative_record_set_name: str = "privatelink",
record_type: str = "A",
) -> None:
private_dns_client = get_private_dns_management_client(platform)
private_zone_name = ".".join(
[private_zone_name, "file", platform.cloud.suffixes.storage_endpoint]
)
private_dns_client.record_sets.create_or_update(
resource_group_name=resource_group_name,
private_zone_name=private_zone_name,
@ -698,11 +712,14 @@ def delete_record_sets(
platform: "AzurePlatform",
resource_group_name: str,
log: Logger,
private_zone_name: str = "privatelink.file.core.windows.net",
private_zone_name: str = "privatelink",
relative_record_set_name: str = "privatelink",
record_type: str = "A",
) -> None:
private_dns_client = get_private_dns_management_client(platform)
private_zone_name = ".".join(
[private_zone_name, "file", platform.cloud.suffixes.storage_endpoint]
)
try:
private_dns_client.record_sets.get(
resource_group_name=resource_group_name,
@ -727,12 +744,15 @@ def create_update_virtual_network_links(
resource_group_name: str,
virtual_network_resource_id: str,
log: Logger,
private_zone_name: str = "privatelink.file.core.windows.net",
private_zone_name: str = "privatelink",
virtual_network_link_name: str = "vnetlink",
registration_enabled: bool = False,
virtual_network_link_location: str = "global",
) -> None:
private_dns_client = get_private_dns_management_client(platform)
private_zone_name = ".".join(
[private_zone_name, "file", platform.cloud.suffixes.storage_endpoint]
)
private_dns_client.virtual_network_links.begin_create_or_update(
resource_group_name=resource_group_name,
private_zone_name=private_zone_name,
@ -750,10 +770,13 @@ def delete_virtual_network_links(
platform: "AzurePlatform",
resource_group_name: str,
log: Logger,
private_zone_name: str = "privatelink.file.core.windows.net",
private_zone_name: str = "privatelink",
virtual_network_link_name: str = "vnetlink",
) -> None:
private_dns_client = get_private_dns_management_client(platform)
private_zone_name = ".".join(
[private_zone_name, "file", platform.cloud.suffixes.storage_endpoint]
)
try:
private_dns_client.virtual_network_links.get(
resource_group_name=resource_group_name,
@ -778,9 +801,12 @@ def create_update_private_dns_zone_groups(
log: Logger,
private_dns_zone_group_name: str = "default",
private_endpoint_name: str = "pe_test",
private_dns_zone_name: str = "privatelink.file.core.windows.net",
private_dns_zone_name: str = "privatelink",
) -> None:
network_client = get_network_client(platform)
private_dns_zone_name = ".".join(
[private_dns_zone_name, "file", platform.cloud.suffixes.storage_endpoint]
)
# network_client.private_dns_zone_groups.delete()
network_client.private_dns_zone_groups.begin_create_or_update(
resource_group_name=resource_group_name,
@ -843,23 +869,30 @@ def get_network_client(platform: "AzurePlatform") -> ComputeManagementClient:
return NetworkManagementClient(
credential=platform.credential,
subscription_id=platform.subscription_id,
base_url=platform.cloud.endpoints.resource_manager,
credential_scopes=[platform.cloud.endpoints.resource_manager + "/.default"],
)
def get_storage_client(
credential: Any, subscription_id: str
credential: Any, subscription_id: str, cloud: Cloud
) -> StorageManagementClient:
return StorageManagementClient(
credential=credential,
subscription_id=subscription_id,
base_url=cloud.endpoints.resource_manager,
credential_scopes=[cloud.endpoints.resource_manager + "/.default"],
)
def get_resource_management_client(
credential: Any, subscription_id: str
credential: Any, subscription_id: str, cloud: Cloud
) -> ResourceManagementClient:
return ResourceManagementClient(
credential=credential, subscription_id=subscription_id
credential=credential,
subscription_id=subscription_id,
base_url=cloud.endpoints.resource_manager,
credential_scopes=[cloud.endpoints.resource_manager + "/.default"],
)
@ -877,6 +910,8 @@ def get_marketplace_ordering_client(
return MarketplaceOrderingAgreements(
credential=platform.credential,
subscription_id=platform.subscription_id,
base_url=platform.cloud.endpoints.resource_manager,
credential_scopes=[platform.cloud.endpoints.resource_manager + "/.default"],
)
@ -916,13 +951,17 @@ def wait_operation(
def get_storage_credential(
credential: Any, subscription_id: str, account_name: str, resource_group_name: str
credential: Any,
subscription_id: str,
cloud: Cloud,
account_name: str,
resource_group_name: str,
) -> Any:
"""
return a shared key credential. This credential doesn't need extra
permissions to access blobs.
"""
storage_client = get_storage_client(credential, subscription_id)
storage_client = get_storage_client(credential, subscription_id, cloud)
key = storage_client.storage_accounts.list_keys(
account_name=account_name, resource_group_name=resource_group_name
).keys[0]
@ -932,6 +971,7 @@ def get_storage_credential(
def generate_blob_sas_token(
credential: Any,
subscription_id: str,
cloud: Cloud,
account_name: str,
resource_group_name: str,
container_name: str,
@ -941,6 +981,7 @@ def generate_blob_sas_token(
shared_key_credential = get_storage_credential(
credential=credential,
subscription_id=subscription_id,
cloud=cloud,
account_name=account_name,
resource_group_name=resource_group_name,
)
@ -959,6 +1000,7 @@ def generate_blob_sas_token(
def generate_sas_token(
credential: Any,
subscription_id: str,
cloud: Cloud,
account_name: str,
resource_group_name: str,
expired_hours: int = 2,
@ -966,6 +1008,7 @@ def generate_sas_token(
) -> Any:
shared_key_credential = get_storage_credential(
credential=credential,
cloud=cloud,
subscription_id=subscription_id,
account_name=account_name,
resource_group_name=resource_group_name,
@ -986,6 +1029,7 @@ def generate_sas_token(
def get_blob_service_client(
credential: Any,
subscription_id: str,
cloud: Cloud,
account_name: str,
resource_group_name: str,
) -> BlobServiceClient:
@ -995,11 +1039,13 @@ def get_blob_service_client(
shared_key_credential = get_storage_credential(
credential=credential,
subscription_id=subscription_id,
cloud=cloud,
account_name=account_name,
resource_group_name=resource_group_name,
)
blob_service_client = BlobServiceClient(
f"https://{account_name}.blob.core.windows.net", shared_key_credential
f"https://{account_name}.blob.{cloud.suffixes.storage_endpoint}",
shared_key_credential,
)
return blob_service_client
@ -1007,6 +1053,7 @@ def get_blob_service_client(
def get_or_create_storage_container(
credential: Any,
subscription_id: str,
cloud: Cloud,
account_name: str,
container_name: str,
resource_group_name: str,
@ -1015,7 +1062,11 @@ def get_or_create_storage_container(
Create a Azure Storage container if it does not exist.
"""
blob_service_client = get_blob_service_client(
credential, subscription_id, account_name, resource_group_name
credential,
subscription_id,
cloud,
account_name,
resource_group_name,
)
container_client = blob_service_client.get_container_client(container_name)
if not container_client.exists():
@ -1026,6 +1077,7 @@ def get_or_create_storage_container(
def check_or_create_storage_account(
credential: Any,
subscription_id: str,
cloud: Cloud,
account_name: str,
resource_group_name: str,
location: str,
@ -1039,7 +1091,7 @@ def check_or_create_storage_account(
# is too big, Azure may not able to delete deployment script on time. so there
# will be error like below
# Creating the deployment 'name' would exceed the quota of '800'.
storage_client = get_storage_client(credential, subscription_id)
storage_client = get_storage_client(credential, subscription_id, cloud)
try:
storage_client.storage_accounts.get_properties(
account_name=account_name,
@ -1065,11 +1117,12 @@ def check_or_create_storage_account(
def delete_storage_account(
credential: Any,
subscription_id: str,
cloud: Cloud,
account_name: str,
resource_group_name: str,
log: Logger,
) -> None:
storage_client = get_storage_client(credential, subscription_id)
storage_client = get_storage_client(credential, subscription_id, cloud)
try:
storage_client.storage_accounts.get_properties(
account_name=account_name,
@ -1088,11 +1141,14 @@ def delete_storage_account(
def check_or_create_resource_group(
credential: Any,
subscription_id: str,
cloud: Cloud,
resource_group_name: str,
location: str,
log: Logger,
) -> None:
with get_resource_management_client(credential, subscription_id) as rm_client:
with get_resource_management_client(
credential, subscription_id, cloud
) as rm_client:
with global_credential_access_lock:
az_shared_rg_exists = rm_client.resource_groups.check_existence(
resource_group_name
@ -1130,6 +1186,7 @@ def copy_vhd_to_storage(
container_client = get_or_create_storage_container(
credential=platform.credential,
subscription_id=platform.subscription_id,
cloud=platform.cloud,
account_name=storage_name,
container_name=SAS_COPIED_CONTAINER_NAME,
resource_group_name=platform._azure_runbook.shared_resource_group_name,
@ -1175,6 +1232,7 @@ def copy_vhd_to_storage(
sas_token = generate_sas_token(
credential=platform.credential,
subscription_id=platform.subscription_id,
cloud=platform.cloud,
account_name=storage_name,
resource_group_name=platform._azure_runbook.shared_resource_group_name, # noqa: E501
writable=True,
@ -1237,17 +1295,19 @@ def wait_copy_blob(
def get_share_service_client(
credential: Any,
subscription_id: str,
cloud: Cloud,
account_name: str,
resource_group_name: str,
) -> ShareServiceClient:
shared_key_credential = get_storage_credential(
credential=credential,
subscription_id=subscription_id,
cloud=cloud,
account_name=account_name,
resource_group_name=resource_group_name,
)
share_service_client = ShareServiceClient(
f"https://{account_name}.file.core.windows.net",
f"https://{account_name}.file.{cloud.suffixes.storage_endpoint}",
shared_key_credential,
)
return share_service_client
@ -1256,6 +1316,7 @@ def get_share_service_client(
def get_or_create_file_share(
credential: Any,
subscription_id: str,
cloud: Cloud,
account_name: str,
file_share_name: str,
resource_group_name: str,
@ -1266,7 +1327,11 @@ def get_or_create_file_share(
Create a Azure Storage file share if it does not exist.
"""
share_service_client = get_share_service_client(
credential, subscription_id, account_name, resource_group_name
credential,
subscription_id,
cloud,
account_name,
resource_group_name,
)
all_shares = list(share_service_client.list_shares())
if file_share_name not in (x.name for x in all_shares):
@ -1278,6 +1343,7 @@ def get_or_create_file_share(
def delete_file_share(
credential: Any,
subscription_id: str,
cloud: Cloud,
account_name: str,
file_share_name: str,
resource_group_name: str,
@ -1287,7 +1353,11 @@ def delete_file_share(
Delete Azure Storage file share
"""
share_service_client = get_share_service_client(
credential, subscription_id, account_name, resource_group_name
credential,
subscription_id,
cloud,
account_name,
resource_group_name,
)
log.debug(f"deleting file share {file_share_name}")
share_service_client.delete_share(file_share_name)
@ -1448,7 +1518,9 @@ def get_vhd_details(platform: "AzurePlatform", vhd_path: str) -> Any:
sc_name = matched.group("sc")
container_name = matched.group("container")
blob_name = matched.group("blob")
storage_client = get_storage_client(platform.credential, platform.subscription_id)
storage_client = get_storage_client(
platform.credential, platform.subscription_id, platform.cloud
)
# sometimes it will fail for below reason if list storage accounts like this way
# [x for x in storage_client.storage_accounts.list() if x.name == sc_name]
# failure - Message: Resource provider 'Microsoft.Storage' failed to return collection response for type 'storageAccounts'. # noqa: E501
@ -1482,6 +1554,7 @@ def _generate_sas_token_for_vhd(
source_container_client = get_or_create_storage_container(
credential=platform.credential,
subscription_id=platform.subscription_id,
cloud=platform.cloud,
account_name=sc_name,
container_name=container_name,
resource_group_name=rg,
@ -1490,6 +1563,7 @@ def _generate_sas_token_for_vhd(
sas_token = generate_sas_token(
credential=platform.credential,
subscription_id=platform.subscription_id,
cloud=platform.cloud,
account_name=sc_name,
resource_group_name=rg,
)
@ -1533,6 +1607,7 @@ def get_deployable_vhd_path(
check_or_create_storage_account(
platform.credential,
platform.subscription_id,
platform.cloud,
storage_name,
platform._azure_runbook.shared_resource_group_name,
location,
@ -1735,6 +1810,7 @@ def check_blob_exist(
container_client = get_or_create_storage_container(
credential=platform.credential,
subscription_id=platform.subscription_id,
cloud=platform.cloud,
account_name=account_name,
container_name=container_name,
resource_group_name=resource_group_name,
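
The changes above share one pattern: management-plane clients are built against the configured cloud's Resource Manager endpoint and credential scope, and data-plane URLs are derived from the cloud's storage suffix instead of the hard-coded public-cloud values. A minimal sketch of that pattern, assuming msrestazure's built-in cloud constants and placeholder subscription/account names (none of which are part of this commit), is:

# Sketch only: construct a management client for a sovereign cloud.
# The subscription id and account name below are placeholders.
from azure.identity import DefaultAzureCredential
from azure.mgmt.storage import StorageManagementClient  # type: ignore
from msrestazure.azure_cloud import AZURE_US_GOV_CLOUD  # type: ignore

cloud = AZURE_US_GOV_CLOUD
credential = DefaultAzureCredential(authority=cloud.endpoints.active_directory)
storage_client = StorageManagementClient(
    credential=credential,
    subscription_id="<subscription-id>",
    base_url=cloud.endpoints.resource_manager,
    credential_scopes=[cloud.endpoints.resource_manager + "/.default"],
)
# Data-plane URLs follow the cloud's storage suffix, e.g.
# "https://<account>.blob.core.usgovcloudapi.net" for Azure US Government.
blob_endpoint = f"https://<account>.blob.{cloud.suffixes.storage_endpoint}"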

View file

@ -1869,6 +1869,7 @@ class Nfs(AzureFeatureMixin, features.Nfs):
check_or_create_storage_account(
credential=platform.credential,
subscription_id=platform.subscription_id,
cloud=platform.cloud,
account_name=self.storage_account_name,
resource_group_name=resource_group_name,
location=location,
@ -1880,6 +1881,7 @@ class Nfs(AzureFeatureMixin, features.Nfs):
get_or_create_file_share(
credential=platform.credential,
subscription_id=platform.subscription_id,
cloud=platform.cloud,
account_name=self.storage_account_name,
file_share_name=self.file_share_name,
resource_group_name=resource_group_name,
@ -1944,6 +1946,7 @@ class Nfs(AzureFeatureMixin, features.Nfs):
delete_file_share(
platform.credential,
platform.subscription_id,
platform.cloud,
self.storage_account_name,
self.file_share_name,
resource_group_name,
@ -1952,6 +1955,7 @@ class Nfs(AzureFeatureMixin, features.Nfs):
delete_storage_account(
platform.credential,
platform.subscription_id,
platform.cloud,
self.storage_account_name,
resource_group_name,
self._log,
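
The Nfs feature now threads platform.cloud into the storage helpers, so the file-share endpoint it mounts tracks the selected cloud. A small illustrative example of the suffix-based URL the helpers build (the account name is a placeholder):

# Sketch: file-share endpoints differ only by the cloud's storage suffix.
from msrestazure.azure_cloud import AZURE_CHINA_CLOUD, AZURE_PUBLIC_CLOUD  # type: ignore

account_name = "examplestorageaccount"
for cloud in (AZURE_PUBLIC_CLOUD, AZURE_CHINA_CLOUD):
    print(f"https://{account_name}.file.{cloud.suffixes.storage_endpoint}")
# https://examplestorageaccount.file.core.windows.net
# https://examplestorageaccount.file.core.chinacloudapi.cn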

View file

@ -9,7 +9,7 @@ import os
import re
import sys
from copy import deepcopy
from dataclasses import dataclass, field
from dataclasses import InitVar, dataclass, field
from datetime import datetime
from difflib import SequenceMatcher
from functools import lru_cache, partial
@ -39,6 +39,13 @@ from azure.mgmt.resource.resources.models import ( # type: ignore
from cachetools import TTLCache, cached
from dataclasses_json import dataclass_json
from marshmallow import fields, validate
from msrestazure.azure_cloud import ( # type: ignore
AZURE_CHINA_CLOUD,
AZURE_GERMAN_CLOUD,
AZURE_PUBLIC_CLOUD,
AZURE_US_GOV_CLOUD,
Cloud,
)
from retry import retry
from lisa import feature, schema, search_space
@ -164,6 +171,13 @@ KEY_WALA_DISTRO_VERSION = "wala_distro"
KEY_HARDWARE_PLATFORM = "hardware_platform"
ATTRIBUTE_FEATURES = "features"
CLOUD: Dict[str, Dict[str, Any]] = {
"azurecloud": AZURE_PUBLIC_CLOUD,
"azurechinacloud": AZURE_CHINA_CLOUD,
"azuregermancloud": AZURE_GERMAN_CLOUD,
"azureusgovernment": AZURE_US_GOV_CLOUD,
}
@dataclass_json()
@dataclass
@ -194,6 +208,38 @@ class AzureLocation:
capabilities: Dict[str, AzureCapability] = field(default_factory=dict)
@dataclass_json()
@dataclass
class CloudEndpointsSchema:
management: str = ""
resource_manager: str = ""
sql_management: str = ""
batch_resource_id: str = ""
gallery: str = ""
active_directory: str = ""
active_directory_resource_id: str = ""
active_directory_graph_resource_id: str = ""
microsoft_graph_resource_id: str = ""
@dataclass_json()
@dataclass
class CloudSuffixesSchema:
storage_endpoint: str = ""
keyvault_dns: str = ""
sql_server_hostname: str = ""
azure_datalake_store_file_system_endpoint: str = ""
azure_datalake_analytics_catalog_and_job_endpoint: str = ""
@dataclass_json()
@dataclass
class CloudSchema:
name: str
endpoints: CloudEndpointsSchema
suffixes: CloudSuffixesSchema
@dataclass_json()
@dataclass
class AzurePlatformSchema:
@ -216,6 +262,10 @@ class AzurePlatformSchema:
validate=validate.Regexp(constants.GUID_REGEXP),
),
)
cloud_raw: Optional[Union[Dict[str, Any], str]] = field(
default=None, metadata=field_metadata(data_key="cloud")
)
_cloud: InitVar[Cloud] = None
shared_resource_group_name: str = AZURE_SHARED_RG_NAME
resource_group_name: str = field(default="")
@ -295,6 +345,64 @@ class AzurePlatformSchema:
if not self.locations:
self.locations = LOCATIONS
@property
def cloud(self) -> Cloud:
# this is a safe guard and prevent mypy error on typing
if not hasattr(self, "_cloud"):
self._cloud: Cloud = None
cloud: Cloud = self._cloud
if not cloud:
# if pass str into cloud, it should be one of below values, case insensitive
# azurecloud
# azurechinacloud
# azuregermancloud
# azureusgovernment
# example
# cloud: AzureCloud
if isinstance(self.cloud_raw, str):
cloud = CLOUD.get(self.cloud_raw.lower(), None)
assert cloud, (
f"cannot find cloud type {self.cloud_raw},"
f" current support list is {list(CLOUD.keys())}"
)
# if pass dict to construct a cloud instance, the full example is
# cloud:
# name: AzureCloud
# endpoints:
# management: https://management.core.windows.net/
# resource_manager: https://management.azure.com/
# sql_management: https://management.core.windows.net:8443/
# batch_resource_id: https://batch.core.windows.net/
# gallery: https://gallery.azure.com/
# active_directory: https://login.microsoftonline.com
# active_directory_resource_id: https://management.core.windows.net/
# active_directory_graph_resource_id: https://graph.windows.net/
# microsoft_graph_resource_id: https://graph.microsoft.com/
# suffixes:
# storage_endpoint: core.windows.net
# keyvault_dns: .vault.azure.net
# sql_server_hostname: .database.windows.net
# azure_datalake_store_file_system_endpoint: azuredatalakestore.net
# azure_datalake_analytics_catalog_and_job_endpoint: azuredatalakeanalytics.net # noqa: E501
elif isinstance(self.cloud_raw, dict):
cloudschema = schema.load_by_type(CloudSchema, self.cloud_raw)
cloud = Cloud(
cloudschema.name, cloudschema.endpoints, cloudschema.suffixes
)
else:
# by default use azure public cloud
cloud = AZURE_PUBLIC_CLOUD
self._cloud = cloud
return cloud
@cloud.setter
def cloud(self, value: Optional[CloudSchema]) -> None:
self._cloud = value
if value is None:
self.cloud_raw = None
else:
self.cloud_raw = value.to_dict() # type: ignore
class AzurePlatform(Platform):
_diagnostic_storage_container_pattern = re.compile(
@ -311,6 +419,7 @@ class AzurePlatform(Platform):
# for type detection
self.credential: DefaultAzureCredential
self.cloud: Cloud
# It has to be defined after the class definition is loaded. So it
# cannot be a class level variable.
@ -463,6 +572,7 @@ class AzurePlatform(Platform):
check_or_create_resource_group(
self.credential,
subscription_id=self.subscription_id,
cloud=self.cloud,
resource_group_name=resource_group_name,
location=location,
log=log,
@ -581,6 +691,7 @@ class AzurePlatform(Platform):
container_client = get_or_create_storage_container(
credential=self.credential,
subscription_id=self.subscription_id,
cloud=self.cloud,
account_name=storage_name,
container_name=container_name,
resource_group_name=self._azure_runbook.shared_resource_group_name,
@ -778,18 +889,21 @@ class AzurePlatform(Platform):
self._azure_runbook = azure_runbook
self.subscription_id = azure_runbook.subscription_id
self.cloud = azure_runbook.cloud
self._initialize_credential()
check_or_create_resource_group(
self.credential,
self.subscription_id,
self.cloud,
azure_runbook.shared_resource_group_name,
azure_runbook.shared_resource_group_location,
self._log,
)
self._rm_client = get_resource_management_client(
self.credential, self.subscription_id
self.credential, self.subscription_id, self.cloud
)
def _initialize_credential(self) -> None:
@ -814,9 +928,16 @@ class AzurePlatform(Platform):
] = azure_runbook.service_principal_client_id
if azure_runbook.service_principal_key:
os.environ["AZURE_CLIENT_SECRET"] = azure_runbook.service_principal_key
credential = DefaultAzureCredential()
with SubscriptionClient(credential) as self._sub_client:
credential = DefaultAzureCredential(
authority=self.cloud.endpoints.active_directory,
)
with SubscriptionClient(
credential,
base_url=self.cloud.endpoints.resource_manager,
credential_scopes=[self.cloud.endpoints.resource_manager + "/.default"],
) as self._sub_client:
# suppress warning message by search for different credential types
azure_identity_logger = logging.getLogger("azure.identity")
azure_identity_logger.setLevel(logging.ERROR)
@ -1329,6 +1450,7 @@ class AzurePlatform(Platform):
check_or_create_storage_account(
self.credential,
self.subscription_id,
self.cloud,
storage_account_name,
self._azure_runbook.shared_resource_group_name,
location,
@ -2030,6 +2152,7 @@ class AzurePlatform(Platform):
container_client = get_or_create_storage_container(
credential=self.credential,
subscription_id=self.subscription_id,
cloud=self.cloud,
account_name=result_dict["account_name"],
container_name=result_dict["container_name"],
resource_group_name=result_dict["resource_group_name"],
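
For reference, the new cloud setting on AzurePlatformSchema accepts either one of the known cloud names or a full name/endpoints/suffixes mapping. A rough standalone sketch of the same resolution logic (resolve_cloud is an illustrative helper, not part of this commit):

# Illustrative helper mirroring the cloud property above; not part of the commit.
from types import SimpleNamespace
from typing import Any, Dict, Optional, Union

from msrestazure.azure_cloud import (  # type: ignore
    AZURE_CHINA_CLOUD,
    AZURE_GERMAN_CLOUD,
    AZURE_PUBLIC_CLOUD,
    AZURE_US_GOV_CLOUD,
    Cloud,
)

KNOWN_CLOUDS: Dict[str, Cloud] = {
    "azurecloud": AZURE_PUBLIC_CLOUD,
    "azurechinacloud": AZURE_CHINA_CLOUD,
    "azuregermancloud": AZURE_GERMAN_CLOUD,
    "azureusgovernment": AZURE_US_GOV_CLOUD,
}


def resolve_cloud(cloud_raw: Optional[Union[str, Dict[str, Any]]]) -> Cloud:
    if isinstance(cloud_raw, str):
        # String form: a known cloud name, case insensitive.
        cloud = KNOWN_CLOUDS.get(cloud_raw.lower())
        assert cloud, f"unknown cloud {cloud_raw}, expected one of {list(KNOWN_CLOUDS)}"
        return cloud
    if isinstance(cloud_raw, dict):
        # Dict form: explicit name/endpoints/suffixes; duck-typed the same way
        # the real code passes its dataclass schemas into Cloud().
        return Cloud(
            cloud_raw["name"],
            SimpleNamespace(**cloud_raw["endpoints"]),
            SimpleNamespace(**cloud_raw["suffixes"]),
        )
    # Default: public Azure.
    return AZURE_PUBLIC_CLOUD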

View file

@ -201,6 +201,7 @@ class VhdTransformer(Transformer):
check_or_create_storage_account(
credential=platform.credential,
subscription_id=platform.subscription_id,
cloud=platform.cloud,
account_name=runbook.storage_account_name,
resource_group_name=runbook.shared_resource_group_name,
location=location,
@ -209,6 +210,7 @@ class VhdTransformer(Transformer):
container_client = get_or_create_storage_container(
credential=platform.credential,
subscription_id=platform.subscription_id,
cloud=platform.cloud,
account_name=runbook.storage_account_name,
container_name=runbook.container_name,
resource_group_name=runbook.shared_resource_group_name,
@ -526,6 +528,7 @@ class SharedGalleryImageTransformer(Transformer):
check_or_create_resource_group(
platform.credential,
platform.subscription_id,
platform.cloud,
runbook.gallery_resource_group_name,
runbook.gallery_location,
self._log,

View file

@ -462,6 +462,7 @@ class Xfstesting(TestSuite):
check_or_create_storage_account(
credential=platform.credential,
subscription_id=platform.subscription_id,
cloud=platform.cloud,
account_name=storage_account_name,
resource_group_name=resource_group_name,
location=location,
@ -471,6 +472,7 @@ class Xfstesting(TestSuite):
fs_url_dict[share_name] = get_or_create_file_share(
credential=platform.credential,
subscription_id=platform.subscription_id,
cloud=platform.cloud,
account_name=storage_account_name,
file_share_name=share_name,
resource_group_name=resource_group_name,
@ -479,6 +481,7 @@ class Xfstesting(TestSuite):
account_credential = get_storage_credential(
credential=platform.credential,
subscription_id=platform.subscription_id,
cloud=platform.cloud,
account_name=storage_account_name,
resource_group_name=resource_group_name,
)
@ -507,6 +510,7 @@ class Xfstesting(TestSuite):
delete_file_share(
credential=platform.credential,
subscription_id=platform.subscription_id,
cloud=platform.cloud,
account_name=storage_account_name,
file_share_name=share_name,
resource_group_name=resource_group_name,
@ -515,6 +519,7 @@ class Xfstesting(TestSuite):
delete_storage_account(
credential=platform.credential,
subscription_id=platform.subscription_id,
cloud=platform.cloud,
account_name=storage_account_name,
resource_group_name=resource_group_name,
log=log,

View file

@ -49,6 +49,7 @@ azure = [
"azure-mgmt-storage ~= 20.0.0",
"azure-storage-blob ~= 12.11.0",
"azure-storage-file-share ~= 12.4.0",
"msrestazure ~= 0.6.4",
"cachetools ~= 5.2.0",
"requests",
"Pillow ~= 9.4.0",