support deploying Azure environments, and refactoring

1. support Azure deployment and add an ARM template.
2. remove env vars; use the constants module to pass variables such as paths (see the sketch after this list).
3. add clone() to copy an environment from the runbook into an actual environment.
4. refine the runner to delete the environment on failure.
5. refine the schema to support a listable validator and richer configs.
6. other minor improvements.
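A rough sketch of item 2 — the pattern swaps process environment variables for typed module-level constants (paths below are illustrative; the real attributes are declared in lisa/util/constants.py in this commit):

    # before: values crossed module boundaries through the process environment
    import os
    os.environ["LISA_RUN_LOCAL_PATH"] = "/tmp/runtime/runs/20200827"
    run_local_path = os.environ["LISA_RUN_LOCAL_PATH"]

    # after: module attributes, set once in main() and imported where needed
    from pathlib import Path
    from lisa.util import constants

    constants.RUN_LOCAL_PATH = Path("runtime/runs/20200827").absolute()
    run_local_path = constants.RUN_LOCAL_PATH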
This commit is contained in:
Chi Song 2020-08-27 18:12:57 +08:00
Parent 5822277899
Commit 9c67a64149
24 changed files: 1092 additions and 209 deletions

View file

@@ -3,7 +3,7 @@ from __future__ import annotations
from abc import ABCMeta, abstractmethod
from enum import Enum
from lisa.util.exceptions import LisaException
from lisa.util import LisaException
from lisa.util.logger import get_logger
ActionStatus = Enum(

View file

@@ -12,8 +12,7 @@ from lisa.sut_orchestrator.ready import ReadyPlatform
from lisa.test_runner.lisarunner import LISARunner
from lisa.testselector import select_testcases
from lisa.testsuite import TestCaseData
from lisa.util import constants
from lisa.util.exceptions import LisaException
from lisa.util import LisaException, constants
from lisa.util.logger import get_logger
from lisa.util.module import import_module

View file

@@ -7,7 +7,7 @@ from typing import TYPE_CHECKING, Optional
from lisa import schema
from lisa.node import Nodes
from lisa.util.exceptions import LisaException
from lisa.util import ContextMixin, LisaException
from lisa.util.logger import get_logger
if TYPE_CHECKING:
@@ -19,14 +19,13 @@ _default_no_name = "_no_name_default"
_get_init_logger = partial(get_logger, "init", "env")
class Environment(object):
class Environment(ContextMixin):
def __init__(self) -> None:
self.nodes: Nodes = Nodes()
self.name: str = ""
self.is_ready: bool = False
self.platform: Optional[Platform] = None
self.runbook: Optional[schema.Environment] = None
self._default_node: Optional[Node] = None
self._log = get_logger("env", self.name)
@@ -44,14 +43,14 @@ class Environment(object):
# it's a spec
nodes_spec.append(node_runbook)
has_default_node = environment._validate_single_default(
has_default_node = environment.__validate_single_default(
has_default_node, node_runbook.is_default
)
# validate that template and node do not appear together
if environment_runbook.template is not None:
is_default = environment_runbook.template.is_default
has_default_node = environment._validate_single_default(
has_default_node = environment.__validate_single_default(
has_default_node, is_default
)
for i in range(environment_runbook.template.node_count):
@@ -78,7 +77,16 @@ class Environment(object):
def close(self) -> None:
self.nodes.close()
def _validate_single_default(
def clone(self) -> Environment:
cloned = Environment()
cloned.runbook = copy.deepcopy(self.runbook)
cloned.nodes = self.nodes
cloned.platform = self.platform
cloned.name = f"inst_{self.name}"
cloned._log = get_logger("env", self.name)
return cloned
def __validate_single_default(
self, has_default: bool, is_default: Optional[bool]
) -> bool:
if is_default:
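The new clone() above is what lets the runner keep the runbook-parsed default environment pristine and hand the platform a mutable copy (named inst_<name>). A minimal sketch of the flow, matching the lisarunner diff later in this commit:

    from lisa.environment import Environment, environments
    from lisa.platform_ import Platform

    def run_on_cloned_environment(platform: Platform) -> None:
        # only the clone is mutated by the platform; the original stays intact
        environment: Environment = platform.request_environment(
            environments.default.clone()
        )
        try:
            ...  # run test suites against the environment
        finally:
            # item 4 of the commit message: delete the environment even on failure
            platform.delete_environment(environment)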

View file

@@ -5,8 +5,7 @@ from abc import ABC, abstractmethod
from hashlib import sha256
from typing import TYPE_CHECKING, Dict, List, Optional, Type, TypeVar, Union, cast
from lisa.util import constants
from lisa.util.exceptions import LisaException
from lisa.util import LisaException, constants
from lisa.util.logger import get_logger
from lisa.util.perf_timer import create_timer
from lisa.util.process import ExecutableResult, Process

View file

@@ -6,7 +6,7 @@ from pathlib import Path
from retry import retry # type: ignore
from lisa.parameter_parser.argparser import parse_args
from lisa.util import constants, env
from lisa.util import constants
from lisa.util.logger import get_logger, set_level, set_log_file
@@ -22,25 +22,29 @@ def create_run_path(root_path: Path) -> Path:
def main() -> None:
local_path = Path("runtime").joinpath("runs").absolute()
runtime_root = Path("runtime").absolute()
constants.CACHE_PATH = runtime_root.joinpath("cache")
constants.CACHE_PATH.mkdir(parents=True, exist_ok=True)
# create run root path
run_path = create_run_path(local_path)
local_path = local_path.joinpath(run_path)
runs_path = runtime_root.joinpath("runs")
logic_path = create_run_path(runs_path)
local_path = runtime_root.joinpath(logic_path)
local_path.mkdir(parents=True)
constants.RUN_ID = run_path.name
env.set_env(env.KEY_RUN_LOCAL_PATH, str(local_path))
env.set_env(env.KEY_RUN_PATH, str(run_path))
constants.RUN_ID = logic_path.name
constants.RUN_LOCAL_PATH = local_path
constants.RUN_LOGIC_PATH = logic_path
args = parse_args()
set_log_file(f"{local_path}/lisa-host.log")
set_log_file(f"{runtime_root}/lisa-host.log")
log = get_logger()
log.info(f"Python version: {sys.version}")
log.info(f"local time: {datetime.now().astimezone()}")
log.info(f"command line args: {sys.argv}")
log.info(f"run local path: {env.get_run_local_path()}")
log.info(f"run local path: {runtime_root}")
if args.debug:
log_level = DEBUG
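The run-path refactoring keeps two values side by side: RUN_LOGIC_PATH is the relative per-run path created under runs/, and RUN_LOCAL_PATH is the same path resolved under the host's runtime root; remote nodes later re-join RUN_LOGIC_PATH under their own root (see the node.py diff below). Roughly, with an illustrative run id:

    from pathlib import Path, PurePath
    from lisa.util import constants

    runtime_root = Path("runtime").absolute()
    # illustrative value; create_run_path derives the real one per run
    constants.RUN_LOGIC_PATH = PurePath("runs") / "20200827" / "0001"
    constants.RUN_LOCAL_PATH = runtime_root / constants.RUN_LOGIC_PATH
    # a remote Linux node ends up using:
    #   $HOME/lisa_working/runs/20200827/0001  (PATH_REMOTE_ROOT + RUN_LOGIC_PATH)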

View file

@@ -3,13 +3,12 @@ from __future__ import annotations
import pathlib
import random
from collections import UserDict
from typing import TYPE_CHECKING, Any, Dict, List, Optional, TypeVar, Union, cast
from typing import TYPE_CHECKING, Any, Dict, Iterable, List, Optional, TypeVar, Union
from lisa import schema
from lisa.executable import Tools
from lisa.tools import Echo, Uname
from lisa.util import constants, env
from lisa.util.exceptions import LisaException
from lisa.util import ContextMixin, LisaException, constants
from lisa.util.logger import get_logger
from lisa.util.process import ExecutableResult, Process
from lisa.util.shell import ConnectionInfo, LocalShell, Shell, SshShell
@@ -17,12 +16,12 @@ from lisa.util.shell import ConnectionInfo, LocalShell, Shell, SshShell
T = TypeVar("T")
class Node:
class Node(ContextMixin):
def __init__(
self,
index: int,
is_remote: bool = True,
spec: Optional[Dict[str, object]] = None,
spec: Optional[schema.NodeSpec] = None,
is_default: bool = False,
id_: str = "",
) -> None:
@@ -54,7 +53,7 @@ class Node:
@staticmethod
def create(
index: int,
spec: Optional[Dict[str, object]] = None,
spec: Optional[schema.NodeSpec] = None,
node_type: str = constants.ENVIRONMENTS_NODES_REMOTE,
is_default: bool = False,
) -> Node:
@@ -65,10 +64,7 @@ class Node:
else:
raise LisaException(f"unsupported node_type '{node_type}'")
node = Node(index, spec=spec, is_remote=is_remote, is_default=is_default)
node._log.debug(
f"created node '{node_type}', isDefault: {is_default}, "
f"isRemote: {is_remote}"
)
node._log.debug(f"created, type: '{node_type}', isDefault: {is_default}")
return node
def set_connection_info(
@@ -132,57 +128,65 @@ class Node:
self._initialize()
return self._is_linux
def close(self) -> None:
self.shell.close()
def _initialize(self) -> None:
if not self._is_initialized:
# prevent looped calls; set _is_initialized to True first
self._is_initialized = True
self._log.debug(f"initializing node {self.name}")
self.shell.initialize()
uname = self.tools[Uname]
(
self.kernel_release,
self.kernel_version,
self.hardware_platform,
self.operating_system,
) = uname.get_linux_information(no_error_log=True)
if (not self.kernel_release) or ("Linux" not in self.operating_system):
self._is_linux = False
if self._is_linux:
self._log.info(
f"initialized Linux node '{self.name}', "
f"kernelRelease: {self.kernel_release}, "
f"kernelVersion: {self.kernel_version}"
f"hardwarePlatform: {self.hardware_platform}"
)
else:
self._log.info(f"initialized Windows node '{self.name}', ")
# set working path
if self.is_remote:
assert self.shell
assert self._connection_info
if self.is_linux:
remote_root_path = pathlib.Path("$HOME")
try:
self.shell.initialize()
uname = self.tools[Uname]
(
self.kernel_release,
self.kernel_version,
self.hardware_platform,
self.operating_system,
) = uname.get_linux_information(no_error_log=True)
if (not self.kernel_release) or ("Linux" not in self.operating_system):
self._is_linux = False
if self._is_linux:
self._log.info(
f"initialized Linux node '{self.name}', "
f"kernelRelease: {self.kernel_release}, "
f"kernelVersion: {self.kernel_version}"
f"hardwarePlatform: {self.hardware_platform}"
)
else:
remote_root_path = pathlib.Path("%TEMP%")
working_path = remote_root_path.joinpath(
constants.PATH_REMOTE_ROOT, env.get_run_path()
).as_posix()
self._log.info(f"initialized Windows node '{self.name}', ")
# expand environment variables in path
echo = self.tools[Echo]
result = echo.run(working_path, shell=True)
# set working path
if self.is_remote:
assert self.shell
assert self._connection_info
# PurePath is more reasonable here, but spurplus doesn't support it.
if self.is_linux:
self.working_path = pathlib.PurePosixPath(result.stdout)
if self.is_linux:
remote_root_path = pathlib.Path("$HOME")
else:
remote_root_path = pathlib.Path("%TEMP%")
working_path = remote_root_path.joinpath(
constants.PATH_REMOTE_ROOT, constants.RUN_LOGIC_PATH
).as_posix()
# expand environment variables in path
echo = self.tools[Echo]
result = echo.run(working_path, shell=True)
# PurePath is more reasonable here, but spurplus doesn't support it.
if self.is_linux:
self.working_path = pathlib.PurePosixPath(result.stdout)
else:
self.working_path = pathlib.PureWindowsPath(result.stdout)
else:
self.working_path = pathlib.PureWindowsPath(result.stdout)
else:
self.working_path = pathlib.Path(env.get_run_local_path())
self._log.debug(f"working path is: '{self.working_path}'")
self.shell.mkdir(self.working_path, parents=True, exist_ok=True)
self.working_path = constants.RUN_LOCAL_PATH
self.shell.mkdir(self.working_path, parents=True, exist_ok=True)
self._log.debug(f"working path is: '{self.working_path}'")
except Exception as identifier:
# initialization failed; make sure it reverts to the uninitialized state
self._is_initialized = False
raise identifier
def _execute(
self,
@@ -205,9 +209,6 @@ class Node:
)
return process
def close(self) -> None:
self.shell.close()
if TYPE_CHECKING:
NodesDict = UserDict[str, Node]
@@ -217,6 +218,7 @@ else:
class Nodes(NodesDict):
def __init__(self) -> None:
super().__init__()
self._default: Optional[Node] = None
self._list: List[Node] = list()
@@ -236,6 +238,10 @@ class Nodes(NodesDict):
self._default = default
return self._default
def list(self) -> Iterable[Node]:
for node in self._list:
yield node
def __getitem__(self, key: Union[int, str]) -> Node:
found = None
if not self._list:
@@ -300,12 +306,11 @@ class Nodes(NodesDict):
def from_spec(
self,
spec: Dict[str, object],
spec: schema.NodeSpec,
node_type: str = constants.ENVIRONMENTS_NODES_REMOTE,
) -> Node:
is_default = cast(bool, spec.get(constants.IS_DEFAULT, False))
node = Node.create(
len(self._list), spec=spec, node_type=node_type, is_default=is_default
len(self._list), spec=spec, node_type=node_type, is_default=spec.is_default
)
self._list.append(node)
return node
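Nodes.list() above is a lazy generator, and from_spec() now takes a typed schema.NodeSpec instead of a raw dict. A small usage sketch (assuming the remaining NodeSpec fields keep their defaults):

    from lisa import schema
    from lisa.environment import Environment

    def add_and_list_nodes(environment: Environment) -> None:
        spec = schema.NodeSpec(is_default=True)  # core_count/memory_gb default to 1
        environment.nodes.from_spec(spec)
        for node in environment.nodes.list():
            print(node.name)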

View file

@@ -37,7 +37,7 @@ def validate(data: Any) -> schema.Runbook:
global _schema
if not _schema:
_schema = schema.Runbook.schema() # type:ignore
_schema = schema.Runbook.schema() # type: ignore
assert _schema
runbook = cast(schema.Runbook, _schema.load(data))

View file

@@ -6,8 +6,7 @@ from functools import partial
from typing import TYPE_CHECKING, Any, List, Optional, Type, cast
from lisa import schema
from lisa.util import constants
from lisa.util.exceptions import LisaException
from lisa.util import LisaException, constants
from lisa.util.logger import get_logger
if TYPE_CHECKING:
@@ -72,11 +71,11 @@ class Platform(ABC):
return environment
def delete_environment(self, environment: Environment) -> None:
self._log.info(f"environment {environment.name} deleting")
self._log.debug(f"environment {environment.name} deleting")
environment.close()
self._delete_environment(environment)
environment.is_ready = False
self._log.info(f"environment {environment.name} deleted")
self._log.debug(f"environment {environment.name} deleted")
if TYPE_CHECKING:

View file

@@ -8,10 +8,9 @@ from dataclasses_json import ( # type: ignore
config,
dataclass_json,
)
from marshmallow import fields, validate
from marshmallow import ValidationError, fields, validate
from lisa.util import constants
from lisa.util.exceptions import LisaException
from lisa.util import LisaException, constants
"""
Schema is dealt with three components,
@@ -32,14 +31,79 @@ def metadata(
if field_function is None:
field_function = fields.Raw
assert field_function
return config(mm_field=field_function(*args, **kwargs))
encoder = kwargs.pop("encoder", None)
decoder = kwargs.pop("decoder", None)
# keep data_key for underlying marshmallow
field_name = kwargs.get("data_key")
return config(
field_name=field_name,
encoder=encoder,
decoder=decoder,
mm_field=field_function(*args, **kwargs),
)
T = TypeVar("T", bound=DataClassJsonMixin)
U = TypeVar("U")
class ListableValidator(validate.Validator):
default_message = ""
def __init__(
self,
value_type: U,
value_validator: Optional[
Union[validate.Validator, List[validate.Validator]]
] = None,
error: str = "",
) -> None:
self._value_type: Any = value_type
if value_validator is None:
self._inner_validator: List[validate.Validator] = []
elif callable(value_validator):
self._inner_validator = [value_validator]
elif isinstance(value_validator, list):
self._inner_validator = list(value_validator)
else:
raise ValueError(
"The 'value_validator' parameter must be a callable "
"or a collection of callables."
)
self.error: str = error or self.default_message
def _repr_args(self) -> str:
return f"_inner_validator={self._inner_validator}"
def _format_error(self, value: Any) -> str:
return self.error.format(input=value)
def __call__(self, value: Any) -> Any:
if isinstance(value, self._value_type):
if self._inner_validator:
for validator in self._inner_validator:
validator(value) # type: ignore
elif isinstance(value, list):
for value_item in value:
assert isinstance(value_item, self._value_type), (
f"must be '{self._value_type}' but '{value_item}' "
f"is '{type(value_item)}'"
)
if self._inner_validator:
for validator in self._inner_validator:
validator(value_item) # type: ignore
elif value is not None:
raise ValidationError(
f"must be Union[{self._value_type}, List[{self._value_type}]], "
f"but '{value}' is '{type(value)}'"
)
return value
class ExtendableSchemaMixin:
def get_extended_runbook(self, runbook_type: Type[T], field_name: str = "") -> T:
def get_extended_runbook(
self, runbook_type: Type[T], field_name: str = ""
) -> Optional[T]:
"""
runbook_type: type of runbook
field_name: the field name which stores the data, if it's "", get it from type
@@ -56,9 +120,11 @@ class ExtendableSchemaMixin:
assert hasattr(self, field_name), f"cannot find attr '{field_name}'"
customized_runbook = getattr(self, field_name)
if not isinstance(customized_runbook, runbook_type):
if customized_runbook is not None and not isinstance(
customized_runbook, runbook_type
):
raise LisaException(
f"runbook type mismatch, expected type: {runbook_type} "
f"extended type mismatch, expected type: {runbook_type} "
f"data type: {type(customized_runbook)}"
)
return customized_runbook
@@ -247,6 +313,13 @@ class RemoteNode:
)
@dataclass_json(letter_case=LetterCase.CAMEL)
@dataclass
class IntegerRange:
min: int
max: int
@dataclass_json(letter_case=LetterCase.CAMEL)
@dataclass
class NodeSpec(ExtendableSchemaMixin):
@@ -260,9 +333,9 @@ class NodeSpec(ExtendableSchemaMixin):
is_default: bool = field(default=False)
# optional, if there is only one artifact.
artifact: str = field(default="")
cpu_count: int = field(
core_count: int = field(
default=1,
metadata=metadata(data_key="cpuCount", validate=validate.Range(min=1)),
metadata=metadata(data_key="coreCount", validate=validate.Range(min=1)),
)
memory_gb: int = field(
default=1,
@@ -347,6 +420,13 @@ class Platform(ExtendableSchemaMixin):
supported_types: ClassVar[List[str]] = [constants.PLATFORM_READY]
admin_username: str = "lisa"
admin_password: str = ""
admin_private_key_file: str = ""
# True means not to delete an environment, even if it's created by lisa
reserve_environment: bool = False
def __post_init__(self, *args: Any, **kwargs: Any) -> None:
platform_fields = dataclass_fields(self)
# get the type field to check whether mismatched type info is set.
@@ -362,6 +442,16 @@
f"'{platform_field.name}' mismatch"
)
if self.type != constants.PLATFORM_READY:
if self.admin_password and self.admin_private_key_file:
raise LisaException(
"only one of admin_password and admin_private_key_file can be set"
)
elif not self.admin_password and not self.admin_private_key_file:
raise LisaException(
"one of admin_password and admin_private_key_file must be set"
)
@dataclass_json(letter_case=LetterCase.CAMEL)
@dataclass
@@ -376,39 +466,16 @@ class Criteria:
area: Optional[str] = None
category: Optional[str] = None
# the runbook is complex to convert, so manual overwrite it in __post_init__.
priority: Optional[Union[int, List[int]]] = field(default=None)
priority: Optional[Union[int, List[int]]] = field(
default=None,
metadata=metadata(
validate=ListableValidator(int, validate.Range(min=0, max=3))
),
)
# tag is a simple way to include test cases within the same topic.
tag: Optional[Union[str, List[str]]] = field(default=None)
def __post_init__(self, *args: Any, **kwargs: Any) -> None:
if isinstance(self.priority, int):
if self.priority < 0 or self.priority > 3:
raise LisaException(
f"priority range should be 0 to 3, but '{self.priority}'"
)
elif isinstance(self.priority, list):
for priority in self.priority:
if priority < 0 or priority > 3:
raise LisaException(
f"priority range should be 0 to 3, but '{priority}'"
)
elif self.priority is not None:
raise LisaException(
f"priority must be Union[int, List[int]], but '{self.priority}' "
f"is '{type(self.priority)}'"
)
if isinstance(self.tag, list):
for tag in self.tag:
assert isinstance(
tag, str
), f"tag must be str, but '{tag}' is '{type(tag)}'"
elif not isinstance(self.tag, str):
if self.tag is not None:
raise LisaException(
f"tag must be Union[str, List[str]], "
f"but '{self.tag}' is '{type(self.tag)}'"
)
tag: Optional[Union[str, List[str]]] = field(
default=None, metadata=metadata(validate=ListableValidator(str))
)
@dataclass_json(letter_case=LetterCase.CAMEL)
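ListableValidator accepts either a single value of value_type or a list of them, running the inner validators on each element; it replaces the hand-written __post_init__ checks deleted above. A quick illustration (assuming the class is importable from lisa.schema, as this diff suggests):

    from marshmallow import ValidationError, validate
    from lisa.schema import ListableValidator

    priority = ListableValidator(int, validate.Range(min=0, max=3))
    priority(2)       # a single int in range passes
    priority([0, 3])  # a list is validated element by element
    try:
        priority("high")  # neither int nor a list of ints
    except ValidationError as error:
        print(error)  # "must be Union[int, List[int]]"-style message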

View file

@@ -0,0 +1,318 @@
{
"$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#",
"contentVersion": "1.0.0.0",
"parameters": {
"location": {
"type": "string",
"metadata": {
"description": "location"
}
},
"nodes": {
"type": "array",
"metadata": {
"description": "all nodes"
}
},
"adminUsername": {
"type": "string",
"metadata": {
"description": "user name"
}
},
"adminPassword": {
"type": "string",
"metadata": {
"description": "password"
}
},
"adminKeyData": {
"type": "string",
"metadata": {
"description": "public key data"
}
}
},
"variables": {
"nodes": "[parameters('nodes')]",
"nodeCount": "[length(parameters('nodes'))]",
"location": "[parameters('location')]",
"resourceGroupName": "[resourceGroup().name]",
"publicIPv4AddressName": "lisa-publicIPv4Address",
"adminUserName": "[parameters('adminUserName')]",
"adminPassword": "[parameters('adminPassword')]",
"adminKeyData": "[parameters('adminKeyData')]",
"virtualNetworkName": "lisa-virtualNetwork",
"defaultSubnet": "lisa-subnetForPrimaryNIC",
"availabilitySetName": "lisa-availabilitySet",
"lbName": "lisa-loadBalancer",
"defaultSubnetId": "[concat(variables('vnetId'),'/subnets/', variables('defaultSubnet'))]",
"vnetId": "[resourceId('Microsoft.Network/virtualNetworks/', variables('virtualNetworkName'))]",
"lbId": "[resourceId('Microsoft.Network/loadBalancers/', variables('lbName'))]",
"frontEndIPv4ConfigID": "[concat(variables('lbId'),'/frontendIPConfigurations/LoadBalancerFrontEndIPv4')]"
},
"resources": [
{
"apiVersion": "2019-07-01",
"type": "Microsoft.Compute/availabilitySets",
"name": "[variables('availabilitySetName')]",
"location": "[variables('location')]",
"sku": {
"name": "Aligned"
},
"properties": {
"platformFaultDomainCount": 2,
"platformUpdateDomainCount": 5
}
},
{
"apiVersion": "2020-05-01",
"type": "Microsoft.Network/loadBalancers",
"name": "[variables('lbName')]",
"location": "[variables('location')]",
"dependsOn": [
"[resourceId('Microsoft.Network/publicIPAddresses/', variables('publicIPv4AddressName'))]"
],
"properties": {
"frontendIPConfigurations": [
{
"name": "LoadBalancerFrontEndIPv4",
"properties": {
"publicIPAddress": {
"id": "[resourceId('Microsoft.Network/publicIPAddresses', variables('publicIPv4AddressName'))]"
}
}
}
],
"backendAddressPools": [
{
"name": "BackendPoolIPv4"
}
],
"copy": [
{
"name": "inboundNatRules",
"count": "[variables('nodeCount')]",
"input": {
"name": "[concat(variables('nodes')[copyIndex('inboundNatRules')]['name'], '-ssh')]",
"properties": {
"frontendIPConfiguration": {
"id": "[variables('frontEndIPv4ConfigID')]"
},
"protocol": "tcp",
"frontendPort": "[add(1111, copyIndex('inboundNatRules'))]",
"backendPort": "22",
"enableFloatingIP": false
}
}
}
]
}
},
{
"apiVersion": "2020-05-01",
"type": "Microsoft.Network/publicIPAddresses",
"location": "[variables('location')]",
"name": "[variables('publicIPv4AddressName')]",
"properties": {
"publicIPAllocationMethod": "Dynamic"
}
},
{
"apiVersion": "2020-05-01",
"type": "Microsoft.Network/virtualNetworks",
"name": "[variables('virtualNetworkName')]",
"location": "[variables('location')]",
"properties": {
"addressSpace": {
"addressPrefixes": [
"10.0.0.0/16"
]
},
"subnets": [
{
"name": "[variables('defaultSubnet')]",
"properties": {
"addressPrefix": "10.0.0.0/24"
}
}
]
}
},
{
"apiVersion": "2020-05-01",
"type": "Microsoft.Network/networkInterfaces",
"copy": {
"name": "nicCopy",
"count": "[variables('nodeCount')]"
},
"name": "[concat(variables('nodes')[copyIndex('nicCopy')]['name'], '-nic')]",
"location": "[variables('location')]",
"dependsOn": [
"[resourceId('Microsoft.Network/publicIPAddresses/', variables('publicIPv4AddressName'))]",
"[variables('vnetId')]",
"[variables('lbId')]"
],
"properties": {
"ipConfigurations": [
{
"name": "IPv4Config",
"properties": {
"privateIPAddressVersion": "IPv4",
"loadBalancerBackendAddressPools": [
{
"id": "[concat(variables('lbId'), '/backendAddressPools/BackendPoolIPv4')]"
}
],
"loadBalancerInboundNatRules": [
{
"id": "[concat(variables('lbId'), '/inboundNatRules/', variables('nodes')[copyIndex('nicCopy')]['name'], '-ssh')]"
}
],
"subnet": {
"id": "[variables('defaultSubnetId')]"
},
"privateIPAllocationMethod": "Dynamic"
}
}
]
}
},
{
"apiVersion": "2019-03-01",
"type": "Microsoft.Compute/images",
"copy": {
"name": "imageCopy",
"count": "[variables('nodeCount')]"
},
"condition": "[not(empty(variables('nodes')[copyIndex('imageCopy')]['vhd']))]",
"name": "[concat(variables('nodes')[copyIndex('imageCopy')]['name'], '-image')]",
"location": "[variables('location')]",
"properties": {
"storageProfile": {
"osDisk": {
"osType": "Linux",
"osState": "Generalized",
"blobUri": "[variables('nodes')[copyIndex('imageCopy')]['vhd']]",
"storageAccountType": "Standard_LRS"
}
},
"hyperVGeneration": "V1"
}
},
{
"apiVersion": "2019-07-01",
"type": "Microsoft.Compute/virtualMachines",
"copy": {
"name": "vmCopy",
"count": "[variables('nodeCount')]"
},
"name": "[variables('nodes')[copyIndex('vmCopy')]['name']]",
"location": "[variables('location')]",
"tags": { "RG": "[variables('resourceGroupName')]" },
"dependsOn": [
"[resourceId('Microsoft.Compute/availabilitySets', variables('availabilitySetName'))]",
"[resourceId('Microsoft.Network/networkInterfaces', concat(variables('nodes')[copyIndex('vmCopy')]['name'], '-nic'))]",
"[resourceId('Microsoft.Compute/images', concat(variables('nodes')[copyIndex('vmCopy')]['name'], '-image'))]"
],
"properties": {
"availabilitySet": {
"id": "[resourceId('Microsoft.Compute/availabilitySets',variables('availabilitySetName'))]"
},
"hardwareProfile": {
"vmSize": "[variables('nodes')[copyIndex('vmCopy')]['vmSize']]"
},
"osProfile": {
"computername": "[variables('nodes')[copyIndex('vmCopy')]['name']]",
"adminUsername": "[variables('adminUserName')]",
"adminPassword": "[if(empty(variables('adminKeyData')), variables('adminPassword'), json('null'))]",
"linuxConfiguration": "[if(empty(variables('adminKeyData')), json('null'), lisa.getLinuxConfiguration(concat('/home/', variables('adminUserName'), '/.ssh/authorized_keys'), variables('adminKeyData')))]"
},
"storageProfile": {
"imageReference": "[if(not(empty(variables('nodes')[copyIndex('vmCopy')]['vhd'])), lisa.getOsDiskVhd(variables('nodes')[copyIndex('vmCopy')]['name']), lisa.getOsDiskGallery(variables('nodes')[copyIndex('vmCopy')]))]",
"osDisk": {
"name": "[concat(variables('nodes')[copyIndex('vmCopy')]['name'], '-osDisk')]",
"managedDisk": {
"storageAccountType": "Standard_LRS"
},
"caching": "ReadWrite",
"createOption": "FromImage"
}
},
"networkProfile": {
"networkInterfaces": [
{
"id": "[resourceId('Microsoft.Network/networkInterfaces', concat(variables('nodes')[copyIndex('vmCopy')]['name'], '-nic'))]"
}
]
},
"diagnosticsProfile": {
"bootDiagnostics": {
"enabled": true,
"storageUri": "[reference(resourceId('default-storage-westus2', 'Microsoft.Storage/storageAccounts', 'lisav2westus2'), '2015-06-15').primaryEndpoints['blob']]"
}
}
}
}
],
"functions": [
{
"namespace": "lisa",
"members": {
"getOsDiskGallery": {
"parameters": [
{
"name": "node",
"type": "object"
}
],
"output": {
"type": "object",
"value": "[parameters('node')['gallery']]"
}
},
"getOsDiskVhd": {
"parameters": [
{
"name": "vmName",
"type": "string"
}
],
"output": {
"type": "object",
"value": {
"id": "[resourceId('Microsoft.Compute/images', concat(parameters('vmName'), '-image'))]"
}
}
},
"getLinuxConfiguration": {
"parameters": [
{
"name": "keyPath",
"type": "string"
},
{
"name": "publicKeyData",
"type": "string"
}
],
"output": {
"type": "object",
"value": {
"disablePasswordAuthentication": true,
"ssh": {
"publicKeys": [
{
"path": "[parameters('keyPath')]",
"keyData": "[parameters('publicKeyData')]"
}
]
},
"provisionVMAgent": true
}
}
}
}
}
]
}
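One template detail worth noting: each node gets an inbound NAT rule whose frontendPort is add(1111, copyIndex(...)) with backendPort 22, so node i is reachable over SSH on the shared public IP at port 1111 + i. The platform code below recovers the mapping from the NAT rules at runtime; the arithmetic itself is simply:

    def ssh_public_port(node_index: int, base_port: int = 1111) -> int:
        # mirrors add(1111, copyIndex('inboundNatRules')) in the template
        return base_port + node_index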

View file

@@ -1,24 +1,151 @@
import json
import logging
import os
import re
from collections import UserDict
from dataclasses import dataclass, field
from typing import Any, Dict, List, Optional, Type
from datetime import datetime
from functools import lru_cache
from pathlib import Path
from typing import TYPE_CHECKING, Any, Dict, List, Optional, Type, cast
import yaml
from azure.identity import DefaultAzureCredential # type: ignore
from azure.mgmt.compute import ComputeManagementClient # type: ignore
from azure.mgmt.compute.models import ResourceSku, VirtualMachine # type: ignore
from azure.mgmt.network import NetworkManagementClient # type: ignore
from azure.mgmt.network.models import InboundNatRule, NetworkInterface # type: ignore
from azure.mgmt.resource import ( # type: ignore
ResourceManagementClient,
SubscriptionClient,
)
from dataclasses_json import LetterCase, dataclass_json # type:ignore
from marshmallow import validate
from azure.mgmt.resource.resources.models import ( # type: ignore
Deployment,
DeploymentMode,
DeploymentProperties,
)
from dataclasses_json import LetterCase, dataclass_json # type: ignore
from marshmallow import fields, validate
from lisa import schema
from lisa.environment import Environment
from lisa.node import Node
from lisa.platform_ import Platform
from lisa.util import constants
from lisa.util.exceptions import LisaException
from lisa.util import LisaException, constants, get_public_key_data
AZURE = "azure"
# used by azure
AZURE_DEPLOYMENT_NAME = "lisa_default_deployment_script"
AZURE_RG_NAME_KEY = "resource_group_name"
VM_SIZE_FALLBACK_PATTERNS = [
re.compile(r"Standard_DS(\d)+_v2"),
re.compile(r"Standard_A(\d)+"),
]
# names in the ARM template; change them together with the template.
RESOURCE_ID_LB = "lisa-loadBalancer"
RESOURCE_ID_PUBLIC_IP = "lisa-publicIPv4Address"
RESOURCE_ID_PORT_POSTFIX = "-ssh"
RESOURCE_ID_NIC_POSTFIX = "-nic"
@dataclass_json(letter_case=LetterCase.CAMEL)
@dataclass
class AzureLocation:
updated_time: datetime = field(
default_factory=datetime.now,
metadata=schema.metadata(
fields.DateTime,
encoder=datetime.isoformat,
decoder=datetime.fromisoformat,
data_key="updatedTime",
format="iso",
),
)
location: str = ""
skus_list: List[ResourceSku] = field(
default_factory=list, metadata=schema.metadata(data_key="skus")
)
def serialize(self) -> None:
if len(self.skus_list) > 0 and isinstance(self.skus_list[0], ResourceSku):
skus_list: List[Any] = list()
for sku_obj in self.skus_list:
skus_list.append(sku_obj.as_dict())
self.skus_list = skus_list
def __post_init__(self, *args: Any, **kwargs: Any) -> None:
skus: Dict[str, ResourceSku] = dict()
for sku in self.skus_list:
sku_obj = ResourceSku.from_dict(sku)
skus[sku_obj.name] = sku_obj
self.skus = skus
if TYPE_CHECKING:
LocationsDict = UserDict[str, Optional[AzureLocation]]
else:
LocationsDict = UserDict
@dataclass_json(letter_case=LetterCase.CAMEL)
@dataclass
class AzureLocations(LocationsDict):
locations: List[AzureLocation] = field(default_factory=list)
def __getitem__(self, location: str) -> Optional[AzureLocation]:
for existing_location in self.locations:
if location == existing_location.location:
return existing_location
return None
def __setitem__(self, _: str, location: Optional[AzureLocation]) -> None:
assert location
for existing_location in self.locations:
if location.location == existing_location.location:
self.locations.remove(existing_location)
self.locations.append(location)
def serialize(self) -> None:
for location in self.locations:
location.serialize()
@dataclass_json(letter_case=LetterCase.CAMEL)
@dataclass
class AzureArmParameterGallery:
publisher: str = "Canonical"
offer: str = "UbuntuServer"
sku: str = "18.04-LTS"
version: str = "Latest"
@dataclass_json(letter_case=LetterCase.CAMEL)
@dataclass
class AzureArmParameterNode:
name: str = ""
vm_size: str = "Standard_A1_v2"
gallery: Optional[AzureArmParameterGallery] = None
vhd: Optional[str] = None
def __post_init__(self, *args: Any, **kwargs: Any) -> None:
if self.gallery is None and self.vhd is None:
raise LisaException("either gallery or vhd must be set one")
elif self.gallery and self.vhd:
raise LisaException("only one of gallery or vhd should be set")
@dataclass_json(letter_case=LetterCase.CAMEL)
@dataclass
class AzureArmParameter:
location: str = "westus2"
admin_username: str = ""
admin_password: str = ""
admin_key_data: str = ""
nodes: List[AzureArmParameterNode] = field(default_factory=list)
@dataclass_json(letter_case=LetterCase.CAMEL)
@dataclass
@@ -63,13 +190,31 @@ class AzurePlatformSchema:
),
)
# do the actual deployment, or pass through for troubleshooting
dry_run: bool = False
# whether to wait until resources are deleted
wait_delete: bool = False
@dataclass_json(letter_case=LetterCase.CAMEL)
@dataclass
class AzureNodeSchema:
vm_size: str = field(default="")
vhd: str = ""
@dataclass
class EnvironmentContext:
resource_group_name: str = ""
resource_group_is_created: bool = False
@dataclass
class NodeContext:
vm_name: str = ""
username: str = "lisa"
password: str = ""
private_key_file: str = ""
class AzurePlatform(Platform):
@@ -92,40 +237,76 @@ class AzurePlatform(Platform):
def _request_environment(self, environment: Environment) -> Environment:
assert self._rm_client
assert self._azure_runbook
assert environment.runbook, "env data cannot be None"
env_runbook: schema.Environment = environment.runbook
environment_context = environment.get_context(EnvironmentContext)
if self._azure_runbook.resource_group_name:
resource_group_name = self._azure_runbook.resource_group_name
self._log.info(f"reusing resource group: {resource_group_name}")
else:
normalized_run_name = constants.NORMALIZE_PATTERN.sub(
"_", constants.RUN_NAME
)
resource_group_name = f"{normalized_run_name}_e{self._enviornment_counter}"
self._enviornment_counter += 1
self._log.info(f"creating resource group: {resource_group_name}")
environment_context.resource_group_is_created = True
self._get_location_info(self._azure_runbook.location)
environment_context.resource_group_name = resource_group_name
if self._azure_runbook.dry_run:
self._log.info(f"dry_run: {self._azure_runbook.dry_run}")
else:
resource_group = self._rm_client.resource_groups.create_or_update(
self._log.info(
f"creating or updating resource group: {resource_group_name}"
)
self._rm_client.resource_groups.create_or_update(
resource_group_name, {"location": self._azure_runbook.location}
)
self._log.info(f"created resource group is {resource_group}")
nodes_parameters: List[Dict[str, Any]] = []
for node_runbook in env_runbook.nodes:
assert isinstance(node_runbook, schema.NodeSpec)
node_parameter: Dict[str, Any] = dict()
node_parameter["vcpu"] = node_runbook.cpu_count
nodes_parameters.append(node_parameter)
self._rm_client.deployments.validate(nodes_parameters)
try:
deployment_parameters = self._create_deployment_parameters(
resource_group_name, environment
)
self._validate_template(deployment_parameters)
self._deploy(deployment_parameters)
self._initialize_nodes(environment)
except Exception as identifier:
self._delete_environment(environment)
raise identifier
return environment
def _delete_environment(self, environment: Environment) -> None:
pass
environment_context = environment.get_context(EnvironmentContext)
resource_group_name = environment_context.resource_group_name
assert resource_group_name
assert self._azure_runbook
if (
environment_context.resource_group_is_created
and not self._runbook.reserve_environment
and not self._azure_runbook.dry_run
):
assert self._rm_client
self._log.info(
f"deleting resource group: {resource_group_name}, "
f"wait: {self._azure_runbook.wait_delete}"
)
delete_operation = self._rm_client.resource_groups.begin_delete(
resource_group_name
)
if self._azure_runbook.wait_delete:
result = delete_operation.wait()
if result:
raise LisaException(f"error on deleting resource group: {result}")
else:
self._log.debug("not wait deleting")
else:
self._log.info(f"skipped to delete resource group: {resource_group_name}")
def _initialize(self) -> None:
# set needed environment variables for authentication
@@ -154,3 +335,250 @@ class AzurePlatform(Platform):
self._rm_client = ResourceManagementClient(
credential=self._credential, subscription_id=self._subscription_id
)
@lru_cache
def _load_template(self) -> Any:
template_file_path = Path(__file__).parent / "arm_template.json"
with open(template_file_path, "r") as f:
template = json.load(f)
return template
@lru_cache
def _get_location_info(self, location: str) -> AzureLocation:
cached_file_name = constants.CACHE_PATH.joinpath("azure_locations.json")
should_refresh: bool = True
location_data: Optional[AzureLocation] = None
if cached_file_name.exists():
with open(cached_file_name, "r") as f:
data = yaml.safe_load(f)
locations_data = cast(
AzureLocations, AzureLocations.schema().load(data) # type:ignore
)
location_data = locations_data.get(location)
else:
locations_data = AzureLocations()
if location_data:
delta = datetime.now() - location_data.updated_time
# refresh cached locations every 5 days.
if delta.days < 5:
should_refresh = False
self._log.debug(
f"{location}: cache used: {location_data.updated_time}, "
f"sku count: {len(location_data.skus)}"
)
else:
self._log.debug(
f"{location}: cache timeout: {location_data.updated_time},"
f"sku count: {len(location_data.skus)}"
)
else:
self._log.debug(f"{location}: no cache found")
if should_refresh:
compute_client = ComputeManagementClient(
credential=self._credential, subscription_id=self._subscription_id
)
all_skus: List[ResourceSku] = []
paged_skus = compute_client.resource_skus.list(
f"location eq '{location}'"
).by_page()
for skus in paged_skus:
for sku in skus:
try:
if sku.resource_type == "virtualMachines":
if sku.restrictions and any(
restriction.type == "Location"
for restriction in sku.restrictions
):
# restricted on this location
continue
all_skus.append(sku)
except Exception as identifier:
self._log.error(f"unknown sku: {sku}")
raise identifier
location_data = AzureLocation(location=location, skus_list=all_skus)
locations_data[location_data.location] = location_data
with open(cached_file_name, "w") as f:
locations_data.serialize()
yaml.safe_dump(locations_data.to_dict(), f) # type: ignore
self._log.debug(
f"{location_data.location}: new data, "
f"sku: {len(location_data.skus_list)}"
)
assert location_data
return location_data
def _create_deployment_parameters(
self, resource_group_name: str, environment: Environment
) -> Dict[str, Any]:
assert environment.runbook, "env data cannot be None"
env_runbook: schema.Environment = environment.runbook
self._log.debug("creating deployment")
# construct parameters
arm_parameters = AzureArmParameter()
arm_parameters.admin_username = self._runbook.admin_username
if self._runbook.admin_private_key_file:
arm_parameters.admin_key_data = get_public_key_data(
self._runbook.admin_private_key_file
)
else:
arm_parameters.admin_password = self._runbook.admin_password
assert self._azure_runbook
arm_parameters.location = self._azure_runbook.location
nodes_parameters: List[AzureArmParameterNode] = []
for node_runbook in env_runbook.nodes:
assert isinstance(node_runbook, schema.NodeSpec)
azure_node_runbook = node_runbook.get_extended_runbook(
AzureNodeSchema, field_name=AZURE
)
gallery = AzureArmParameterGallery()
node_parameter = AzureArmParameterNode(gallery=gallery)
node_parameter.name = f"node-{len(nodes_parameters)}"
if azure_node_runbook:
if azure_node_runbook.vm_size:
node_parameter.vm_size = azure_node_runbook.vm_size
if azure_node_runbook.vhd:
node_parameter.vhd = azure_node_runbook.vhd
node_parameter.gallery = None
nodes_parameters.append(node_parameter)
# init node
node = environment.nodes.from_spec(node_runbook)
node_context = node.get_context(NodeContext)
node_context.vm_name = node_parameter.name
node_context.username = arm_parameters.admin_username
node_context.password = arm_parameters.admin_password
node_context.private_key_file = self._runbook.admin_private_key_file
arm_parameters.nodes = nodes_parameters
# load template
template = self._load_template()
parameters = arm_parameters.to_dict() # type:ignore
parameters = {k: {"value": v} for k, v in parameters.items()}
self._log.debug(f"parameters: {parameters}")
deployment_properties = DeploymentProperties(
mode=DeploymentMode.incremental, template=template, parameters=parameters,
)
return {
AZURE_RG_NAME_KEY: resource_group_name,
"deployment_name": AZURE_DEPLOYMENT_NAME,
"parameters": Deployment(properties=deployment_properties),
}
def _validate_template(self, deployment_parameters: Dict[str, Any]) -> None:
resource_group_name = deployment_parameters[AZURE_RG_NAME_KEY]
self._log.debug("validating deployment")
validate_operation: Any = None
deployments = self._rm_client.deployments
try:
validate_operation = self._rm_client.deployments.begin_validate(
**deployment_parameters
)
result = validate_operation.wait()
if result:
raise LisaException(f"deploy failed: {result}")
except Exception as identifier:
if validate_operation:
deployment = deployments.get(resource_group_name, AZURE_DEPLOYMENT_NAME)
# log more details for troubleshooting
if deployment.properties.provisioning_state == "Failed":
errors = deployment.properties.error.details
for error in errors:
self._log.error(f"failed: {error.code}, {error.message}")
raise identifier
assert result is None, f"validate error: {result}"
def _deploy(self, deployment_parameters: Dict[str, Any]) -> None:
resource_group_name = deployment_parameters[AZURE_RG_NAME_KEY]
self._log.info(f"deploying {resource_group_name}")
deployment_operation: Any = None
deployments = self._rm_client.deployments
try:
deployment_operation = deployments.begin_create_or_update(
**deployment_parameters
)
result = deployment_operation.wait()
if result:
raise LisaException(f"deploy failed: {result}")
except Exception as identifier:
if deployment_operation:
deployment = deployments.get(resource_group_name, AZURE_DEPLOYMENT_NAME)
# log more details for troubleshooting
if deployment.properties.provisioning_state == "Failed":
errors = deployment.properties.error.details
for error in errors:
self._log.error(f"failed: {error.code}, {error.message}")
raise identifier
def _initialize_nodes(self, environment: Environment) -> None:
node_context_map: Dict[str, Node] = dict()
for node in environment.nodes.list():
node_context = node.get_context(NodeContext)
node_context_map[node_context.vm_name] = node
compute_client = ComputeManagementClient(
credential=self._credential, subscription_id=self._subscription_id
)
environment_context = environment.get_context(EnvironmentContext)
vms_map: Dict[str, VirtualMachine] = dict()
vms = compute_client.virtual_machines.list(
environment_context.resource_group_name
)
for vm in vms:
vms_map[vm.name] = vm
network_client = NetworkManagementClient(
credential=self._credential, subscription_id=self._subscription_id
)
# load port mappings
nat_rules_map: Dict[str, InboundNatRule] = dict()
load_balancing = network_client.load_balancers.get(
environment_context.resource_group_name, RESOURCE_ID_LB
)
for rule in load_balancing.inbound_nat_rules:
name = rule.name[: -len(RESOURCE_ID_PORT_POSTFIX)]
nat_rules_map[name] = rule
# load nics
nic_map: Dict[str, NetworkInterface] = dict()
network_interfaces = network_client.network_interfaces.list(
environment_context.resource_group_name
)
for nic in network_interfaces:
name = nic.name[: -len(RESOURCE_ID_NIC_POSTFIX)]
nic_map[name] = nic
# get public IP
public_ip_address = network_client.public_ip_addresses.get(
environment_context.resource_group_name, RESOURCE_ID_PUBLIC_IP
).ip_address
for vm_name, node in node_context_map.items():
node_context = node.get_context(NodeContext)
vm = vms_map[vm_name]
nic = nic_map[vm_name]
nat_rule = nat_rules_map[vm_name]
address = nic.ip_configurations[0].private_ip_address
port = nat_rule.backend_port
public_port = nat_rule.frontend_port
node.set_connection_info(
address=address,
port=port,
public_address=public_ip_address,
public_port=public_port,
username=node_context.username,
password=node_context.password,
private_key_file=node_context.private_key_file,
)

View file

@@ -1,7 +1,7 @@
from typing import Dict, Iterable, List, cast
from typing import Dict, Iterable, List, Optional, cast
from lisa.action import Action, ActionStatus
from lisa.environment import environments
from lisa.environment import Environment, environments
from lisa.platform_ import Platform
from lisa.testsuite import (
TestCaseData,
@@ -46,34 +46,41 @@ class LISARunner(Action):
test_suites[test_case_data.metadata.suite] = test_suite_cases
# request environment
environment = self.platform.request_environment(environments.default)
cloned_environment = environments.default.clone()
environment: Optional[Environment] = None
try:
environment = self.platform.request_environment(cloned_environment)
self._log.info(f"start running {len(test_results)} cases")
for test_suite_metadata in test_suites:
test_suite: TestSuite = test_suite_metadata.test_class(
environment,
test_suites.get(test_suite_metadata, []),
test_suite_metadata,
)
try:
await test_suite.start()
except Exception as identifier:
self._log.error(f"suite[{test_suite_metadata}] failed: {identifier}")
self._log.info(f"start running {len(test_results)} cases")
for test_suite_metadata in test_suites:
test_suite: TestSuite = test_suite_metadata.test_class(
environment,
test_suites.get(test_suite_metadata, []),
test_suite_metadata,
)
try:
await test_suite.start()
except Exception as identifier:
self._log.error(
f"suite[{test_suite_metadata.name}] failed: {identifier}"
)
result_count_dict: Dict[TestStatus, int] = dict()
for result in test_results:
result_count = result_count_dict.get(result.status, 0)
result_count += 1
result_count_dict[result.status] = result_count
result_count_dict: Dict[TestStatus, int] = dict()
for result in test_results:
result_count = result_count_dict.get(result.status, 0)
result_count += 1
result_count_dict[result.status] = result_count
self._log.info("result summary")
self._log.info(f" TOTAL\t: {len(test_results)}")
for key in TestStatus:
self._log.info(f" {key.name}\t: {result_count_dict.get(key, 0)}")
self._log.info("result summary")
self._log.info(f" TOTAL\t: {len(test_results)}")
for key in TestStatus:
self._log.info(f" {key.name}\t: {result_count_dict.get(key, 0)}")
# delete environment after run
self.platform.delete_environment(environment)
self.set_status(ActionStatus.SUCCESS)
# delete environment after run
self.set_status(ActionStatus.SUCCESS)
finally:
if environment:
self.platform.delete_environment(environment)
async def stop(self) -> None:
super().stop()

View file

@@ -10,8 +10,7 @@ from lisa.testsuite import (
_cases,
_suites,
)
from lisa.util import constants
from lisa.util.exceptions import LisaException
from lisa.util import LisaException, constants
class SelectorTestCase(TestCase):

View file

@@ -4,8 +4,7 @@ from typing import Callable, Dict, List, Mapping, Optional, Pattern, Set, Union,
from lisa import schema
from lisa.testsuite import TestCaseData, TestCaseMetadata, get_cases_metadata
from lisa.util import constants
from lisa.util.exceptions import LisaException
from lisa.util import LisaException, constants
from lisa.util.logger import get_logger
_get_logger = partial(get_logger, "init", "selector")

View file

@@ -8,7 +8,7 @@ from functools import wraps
from typing import TYPE_CHECKING, Any, Callable, Dict, List, Type
from lisa.action import Action, ActionStatus
from lisa.util.exceptions import LisaException
from lisa.util import LisaException
from lisa.util.logger import get_logger
from lisa.util.perf_timer import create_timer

View file

@@ -2,7 +2,7 @@ import re
from typing import Tuple
from lisa.executable import Tool
from lisa.util.exceptions import LisaException
from lisa.util import LisaException
class Uname(Tool):

View file

@@ -0,0 +1,31 @@
from pathlib import Path
from typing import Type, TypeVar
T = TypeVar("T")
class LisaException(Exception):
pass
class ContextMixin:
def get_context(self, context_type: Type[T]) -> T:
if not hasattr(self, "_context"):
self._context: T = context_type()
else:
assert isinstance(self._context, context_type)
return self._context
def get_public_key_data(private_key_file_path: str) -> str:
# TODO: support ppk, if it's needed.
private_key_path = Path(private_key_file_path)
if not private_key_path.exists():
raise LisaException(f"private key file does not exist: {private_key_file_path}")
public_key_path = private_key_path.parent / f"{private_key_path.name}.pub"
if not public_key_path.exists():
raise LisaException(f"public key file does not exist: {public_key_path}")
with open(public_key_path, "r") as fp:
public_key_data = fp.read()
return public_key_data
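ContextMixin gives any object a lazily created, type-checked context object; the Azure platform relies on it to attach EnvironmentContext and NodeContext to environments and nodes without widening their constructors. A minimal sketch (DemoContext and DemoNode are illustrative names):

    from dataclasses import dataclass
    from lisa.util import ContextMixin

    @dataclass
    class DemoContext:
        vm_name: str = ""

    class DemoNode(ContextMixin):
        pass

    node = DemoNode()
    context = node.get_context(DemoContext)  # created on first access
    context.vm_name = "node-0"
    assert node.get_context(DemoContext) is context  # cached afterwards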

View file

@@ -1,4 +1,5 @@
import re
from pathlib import Path, PurePath
# config types
CONFIG_RUNBOOK = "runbook"
@@ -8,6 +9,10 @@ CONFIG_TEST_CASES = "testcases"
RUN_ID = ""
RUN_NAME = ""
CACHE_PATH: Path
RUN_LOCAL_PATH: Path
RUN_LOGIC_PATH: PurePath
# path related
PATH_REMOTE_ROOT = "lisa_working"
PATH_TOOL = "tool"

View file

@@ -1,25 +0,0 @@
import os
from pathlib import PurePath
KEY_RUN_LOCAL_PATH = "RUN_LOCAL_PATH"
KEY_RUN_PATH = "RUN_PATH"
__prefix = "LISA_"
def get_run_local_path() -> PurePath:
return PurePath(get_env(KEY_RUN_LOCAL_PATH))
def get_run_path() -> PurePath:
return PurePath(get_env(KEY_RUN_PATH))
def set_env(name: str, value: str, is_secret: bool = False) -> None:
name = f"{__prefix}{name}"
os.environ[name] = value
def get_env(name: str) -> str:
name = f"{__prefix}{name}"
return os.environ[name]

View file

@@ -1,2 +0,0 @@
class LisaException(Exception):
pass

View file

@@ -3,7 +3,7 @@ import time
from functools import partial
from typing import Dict, List, Optional, Union, cast
from lisa.util.exceptions import LisaException
from lisa.util import LisaException
# to prevent circular import, hard code it here.
ENV_KEY_RUN_LOCAL_PATH = "LISA_RUN_LOCAL_PATH"

View file

@@ -1,5 +1,7 @@
import logging
import os
import shutil
from logging import getLogger
from pathlib import Path, PurePath
from typing import Any, Mapping, Optional, Sequence, Union, cast
@@ -7,7 +9,7 @@ import paramiko # type: ignore
import spur # type: ignore
import spurplus # type: ignore
from lisa.util.exceptions import LisaException
from lisa.util import LisaException
class ConnectionInfo:
@@ -17,7 +19,7 @@ class ConnectionInfo:
port: int = 22,
username: str = "root",
password: Optional[str] = "",
private_key_file: str = "",
private_key_file: Optional[str] = None,
) -> None:
self.address = address
self.port = port
@@ -30,12 +32,13 @@ class ConnectionInfo:
"at least one of password and privateKeyFile need to be set"
)
elif not self.private_key_file:
self._use_password = True
# use password
# spurplus doesn't process empty string correctly, use None
self.private_key_file = None
else:
if not Path(self.private_key_file).exists():
raise FileNotFoundError(self.private_key_file)
self.password = None
self._use_password = False
if not self.username:
raise LisaException("username must be set")
@@ -48,6 +51,9 @@ class SshShell:
self._connection_info = connection_info
self._inner_shell: Optional[spurplus.SshShell] = None
paramiko_logger = getLogger("paramiko")
paramiko_logger.setLevel(logging.WARN)
def initialize(self) -> None:
self._inner_shell = spurplus.connect_with_retries(
self._connection_info.address,

poetry.lock (generated)
View file

@@ -68,6 +68,19 @@ msal = ">=1.3.0,<2.0.0"
msal-extensions = ">=0.2.2,<0.3.0"
six = ">=1.6"
[[package]]
category = "main"
description = "Microsoft Azure Compute Management Client Library for Python"
name = "azure-mgmt-compute"
optional = false
python-versions = "*"
version = "17.0.0b1"
[package.dependencies]
azure-common = ">=1.1,<2.0"
azure-mgmt-core = ">=1.0.0,<2.0.0"
msrest = ">=0.5.0"
[[package]]
category = "main"
description = "Microsoft Azure Management Core Library for Python"
@@ -79,6 +92,19 @@ version = "1.2.0"
[package.dependencies]
azure-core = ">=1.7.0.dev,<2.0.0"
[[package]]
category = "main"
description = "Microsoft Azure Network Management Client Library for Python"
name = "azure-mgmt-network"
optional = false
python-versions = "*"
version = "16.0.0b1"
[package.dependencies]
azure-common = ">=1.1,<2.0"
azure-mgmt-core = ">=1.0.0,<2.0.0"
msrest = ">=0.5.0"
[[package]]
category = "main"
description = "Microsoft Azure Resource Management Client Library for Python"
@@ -870,7 +896,7 @@ secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "pyOpenSSL (>=0
socks = ["PySocks (>=1.5.6,<1.5.7 || >1.5.7,<2.0)"]
[metadata]
content-hash = "e6acb4c0b78ec5fe460271a163aba570ba742d4522fd11e6376e806a18958bef"
content-hash = "e86f7df7749f254cd879bc96228add6970bd6e878d945664ce96f5cac6af08e3"
python-versions = "^3.8"
[metadata.files]
@@ -898,10 +924,18 @@ azure-identity = [
{file = "azure-identity-1.4.0.zip", hash = "sha256:820e1f3e21f90d36063239c6cb7ca9a6bb644cb120a6b1ead3081cafdf6ceaf8"},
{file = "azure_identity-1.4.0-py2.py3-none-any.whl", hash = "sha256:92ccea6c6ac7724d186cb73422d1ad8f525202dce2bdc17f35c695948fadf222"},
]
azure-mgmt-compute = [
{file = "azure-mgmt-compute-17.0.0b1.zip", hash = "sha256:f0a5827a5c8f8219fdb4008dc104b5e4ca5a944179d35b097150b8017ece4f1b"},
{file = "azure_mgmt_compute-17.0.0b1-py2.py3-none-any.whl", hash = "sha256:62be422c34d472693103a158fc6929c3355adc99a373dce7803cab8c16a2861a"},
]
azure-mgmt-core = [
{file = "azure-mgmt-core-1.2.0.zip", hash = "sha256:8fe3b59446438f27e34f7b24ea692a982034d9e734617ca1320eedeee1939998"},
{file = "azure_mgmt_core-1.2.0-py2.py3-none-any.whl", hash = "sha256:6966226111e92dff26d984aa1c76f227ce0e8b2069c45c72cfb67f160c452444"},
]
azure-mgmt-network = [
{file = "azure-mgmt-network-16.0.0b1.zip", hash = "sha256:f84b997bf231eca92d31349a0c89027a083a53308628dfa7ac7b26b10b963a71"},
{file = "azure_mgmt_network-16.0.0b1-py2.py3-none-any.whl", hash = "sha256:e6d3bdb5142a54a7a2842a73e597f219faa15d1e20af6383252df87a0096109b"},
]
azure-mgmt-resource = [
{file = "azure-mgmt-resource-15.0.0b1.zip", hash = "sha256:e7d2c514f1ce14ccd6ddb75398625b4784a784eb7de052b10ac446438959795e"},
{file = "azure_mgmt_resource-15.0.0b1-py2.py3-none-any.whl", hash = "sha256:e294d22c42da23a94cb00998ab91d96b293efedc24427a88fdafd5ed70997abf"},

View file

@@ -17,6 +17,8 @@ dataclasses-json = "^0.5.2"
portalocker = "^1.7.1"
azure-identity = {version = "^1.4.0", allow-prereleases = true}
azure-mgmt-resource = {version = "^15.0.0-beta.1", allow-prereleases = true}
azure-mgmt-compute = {version = "^17.0.0-beta.1", allow-prereleases = true}
azure-mgmt-network = {version = "^16.0.0-beta.1", allow-prereleases = true}
[tool.poetry.dev-dependencies]
black = "^19.10b0"