use strongly typed schema, init Azure platform code

1. use dataclasses as the base schema, dataclasses_json for serialization,
   and marshmallow for validation (see the sketch below).
2. remove the YAML-format schema, add a Python schema.
3. use strong types to replace dict-based code.
4. add platform schema and node schema for platform extensions.
5. add _initialize for platforms.
6. init Azure platform code.
7. other small improvements.
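A minimal sketch of the pattern behind items 1 and 3 (the `DemoNode` type and its fields are illustrative, not part of this commit):

```python
from dataclasses import dataclass, field

from dataclasses_json import LetterCase, config, dataclass_json
from marshmallow import fields, validate


# dataclasses define the shape, dataclasses_json serializes, and
# marshmallow (attached through mm_field) validates.
@dataclass_json(letter_case=LetterCase.CAMEL)
@dataclass
class DemoNode:
    name: str = ""
    port: int = field(
        default=22,
        metadata=config(
            mm_field=fields.Int(validate=validate.Range(min=1, max=65535))
        ),
    )


# loading through the generated marshmallow schema deserializes and
# validates in one step; an out-of-range port raises a ValidationError.
node = DemoNode.schema().load({"name": "n0", "port": 22})
assert node.port == 22
```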
This commit is contained in:
Chi Song 2020-08-20 12:04:55 +08:00
Parent 18b0f902c2
Commit 145877ff63
24 changed files with 1442 additions and 690 deletions

View file

@ -27,17 +27,18 @@ jobs:
- name: Install Poetry for Linux
if: runner.os == 'Linux'
run: |
curl -sSL https://raw.githubusercontent.com/python-poetry/poetry/master/get-poetry.py | python
curl -sSL https://raw.githubusercontent.com/python-poetry/poetry/master/get-poetry.py | python - --preview --version 1.1.0a2
echo "::add-path::$HOME/.poetry/bin"
- name: Install Poetry for Windows
if: runner.os == 'Windows'
run: |
(Invoke-WebRequest -Uri https://raw.githubusercontent.com/python-poetry/poetry/master/get-poetry.py -UseBasicParsing).Content | python
(Invoke-WebRequest -Uri https://raw.githubusercontent.com/python-poetry/poetry/master/get-poetry.py -UseBasicParsing).Content | python - --preview --version 1.1.0a2
echo "::add-path::$env:USERPROFILE\.poetry\bin"
- name: Install Python dependencies
run: poetry install
run: |
poetry install
- name: Run LISAv3 hello-world
run: poetry run python lisa/main.py --debug

View file

@ -155,6 +155,14 @@ mypy's [cheat sheet][].
[intro]: https://kishstats.com/python/2019/01/07/python-type-hinting.html
[cheat sheet]: https://mypy.readthedocs.io/en/latest/cheat_sheet_py3.html
## Runbook schema
Some plugins, like Platform, need to follow this section to extend the runbook schema. The runbook is the configuration of a LISA run; every LISA run needs a runbook.
The runbook uses [dataclass](https://docs.python.org/3/library/dataclasses.html) to define the schema, [dataclass-json](https://github.com/lidatong/dataclasses-json/) to deserialize it, and [marshmallow](https://marshmallow.readthedocs.io/en/3.0/api_reference.html) to validate it.
See more examples in [schema.py](lisa/schema.py) if you need to extend the runbook schema.
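For instance, a hypothetical platform plugin might define its runbook section like this (the `MyPlatformSchema` name and its fields are illustrative only):

```python
from dataclasses import dataclass, field

from dataclasses_json import LetterCase, dataclass_json
from marshmallow import validate

from lisa import schema


# illustrative plugin section; schema.metadata() is the helper that
# attaches marshmallow validation to a dataclass field.
@dataclass_json(letter_case=LetterCase.CAMEL)
@dataclass
class MyPlatformSchema:
    region: str = field(
        default="westus2",
        metadata=schema.metadata(validate=validate.OneOf(["westus2", "eastus"])),
    )
```

A platform exposes such a type through its `platform_schema` property, and LISA merges it into the runbook schema at load time.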
## Committing Guidelines
A best practice when using [Git](https://git-scm.com/book/en/v2) is to create a

View file

@ -32,6 +32,12 @@ On Windows (in PowerShell):
$env:PATH += ";$env:USERPROFILE\.poetry\bin"
```
TEMP WORKAROUND: Poetry cannot handle the azure-identity package in its current version (1.0.10), so install the preview version until a fixed release is available.
```bash
poetry self update --preview 1.1.0a2
```
Then use Poetry to install LISA v3's Python package dependencies:
```

View file

@ -4,9 +4,10 @@ from argparse import Namespace
from pathlib import Path, PurePath
from typing import Dict, Iterable, List, Optional, cast
import lisa.parameter_parser.config as config_ops
from lisa.environment import environments, load_environments
from lisa.parameter_parser.config import Config, parse_to_config
from lisa.platform_ import initialize_platforms, platforms
from lisa.platform_ import initialize_platforms, load_platforms, platforms
from lisa.schema import Config
from lisa.sut_orchestrator.ready import ReadyPlatform
from lisa.test_runner.lisarunner import LISARunner
from lisa.testselector import select_testcases
@ -28,29 +29,37 @@ def _load_extends(base_path: Path, extends_config: Dict[str, object]) -> None:
def _initialize(args: Namespace) -> Iterable[TestCaseData]:
# make sure extension in lisa is loaded
base_module_path = Path(__file__).parent
import_module(base_module_path, logDetails=False)
initialize_platforms()
# merge all parameters
config = parse_to_config(args)
path = Path(args.config).absolute()
data = config_ops.load(path)
# load external extension
_load_extends(config.base_path, config.extension)
# load extended modules
if constants.EXTENSION in data:
_load_extends(path.parent, data[constants.EXTENSION])
# validate config after extensions are loaded
config = config_ops.validate(data)
log = _get_init_logger()
constants.RUN_NAME = f"lisa_{config.name}_{constants.RUN_ID}"
log.info(f"run name is {constants.RUN_NAME}")
# initialize environment
load_environments(config.environment)
# initialize platform
initialize_platforms(config.platform)
load_platforms(config.platform)
# filter test cases
selected_cases = select_testcases(config.testcase)
_validate(config)
log = _get_init_logger()
log.info(f"selected cases: {len(list(selected_cases))}")
return selected_cases
@ -93,18 +102,13 @@ def list_start(args: Namespace) -> None:
def _validate(config: Config) -> None:
environment_config = config.environment
warn_as_error = False
if environment_config:
warn_as_error = cast(
bool, environment_config.get(constants.WARN_AS_ERROR, False)
)
log = _get_init_logger()
for environment in environments.values():
if environment.spec is not None and isinstance(
platforms.default, ReadyPlatform
):
log.warn_or_raise(
warn_as_error, "the ready platform cannot process environment spec"
)
if config.environment:
log = _get_init_logger()
for environment in environments.values():
if environment.data is not None and isinstance(
platforms.default, ReadyPlatform
):
log.warn_or_raise(
config.environment.warn_as_error,
"the ready platform cannot process environment spec",
)

View file

@ -3,10 +3,10 @@ from __future__ import annotations
import copy
from collections import UserDict
from functools import partial
from typing import TYPE_CHECKING, Dict, List, Optional, cast
from typing import TYPE_CHECKING, Optional
from lisa import schema
from lisa.node import Nodes
from lisa.util import constants
from lisa.util.exceptions import LisaException
from lisa.util.logger import get_logger
@ -25,67 +25,50 @@ class Environment(object):
self.name: str = ""
self.is_ready: bool = False
self.platform: Optional[Platform] = None
self.spec: Optional[Dict[str, object]] = None
self.data: Optional[schema.Environment] = None
self._default_node: Optional[Node] = None
self._log = get_logger("env", self.name)
@staticmethod
def load(config: Dict[str, object]) -> Environment:
def load(environment_data: schema.Environment) -> Environment:
environment = Environment()
spec = copy.deepcopy(config)
environment.name = cast(str, spec.get(constants.NAME, ""))
environment.name = environment_data.name
has_default_node = False
nodes_spec = []
nodes_config = cast(
List[Dict[str, object]], spec.get(constants.ENVIRONMENTS_NODES)
)
for node_config in nodes_config:
node = environment.nodes.create_by_config(node_config)
if not node:
# it's a spec
nodes_spec.append(node_config)
if environment_data.nodes:
for node_data in environment_data.nodes:
node = environment.nodes.from_data(node_data)
if not node:
# it's a spec
nodes_spec.append(node_data)
is_default = cast(Optional[bool], node_config.get(constants.IS_DEFAULT))
has_default_node = environment._validate_single_default(
has_default_node, node_data.is_default
)
# validate template and node not appear together
if environment_data.template is not None:
is_default = environment_data.template.is_default
has_default_node = environment._validate_single_default(
has_default_node, is_default
)
# validate template and node not appear together
nodes_template = cast(
List[Dict[str, object]], spec.get(constants.ENVIRONMENTS_TEMPLATE)
)
if nodes_template is not None:
for item in nodes_template:
node_count = cast(
Optional[int], item.get(constants.ENVIRONMENTS_TEMPLATE_NODE_COUNT)
)
if node_count is None:
node_count = 1
else:
del item[constants.ENVIRONMENTS_TEMPLATE_NODE_COUNT]
is_default = cast(Optional[bool], item.get(constants.IS_DEFAULT))
has_default_node = environment._validate_single_default(
has_default_node, is_default
)
for i in range(node_count):
copied_item = copy.deepcopy(item)
# only one default node for template also
if is_default and i > 0:
del copied_item[constants.IS_DEFAULT]
nodes_spec.append(copied_item)
del spec[constants.ENVIRONMENTS_TEMPLATE]
for i in range(environment_data.template.node_count):
copied_item = copy.deepcopy(environment_data.template)
# only one default node for template also
if is_default and i > 0:
copied_item.is_default = False
nodes_spec.append(copied_item)
environment_data.template = None
if len(nodes_spec) == 0 and len(environment.nodes) == 0:
raise LisaException("not found any node in environment")
spec[constants.ENVIRONMENTS_NODES] = nodes_spec
environment_data.nodes = nodes_spec
environment.spec = spec
environment._log.debug(f"environment spec is {environment.spec}")
environment.data = environment_data
environment._log.debug(f"environment data is {environment.data}")
return environment
@property
@ -105,19 +88,15 @@ class Environment(object):
return has_default
def load_environments(config: Dict[str, object]) -> None:
if not config:
raise LisaException("environment section must be set in config")
environments.max_concurrency = cast(
int, config.get(constants.ENVIRONMENT_MAX_CONCURRENCY, 1)
)
environments_config = cast(
List[Dict[str, object]], config.get(constants.ENVIRONMENTS)
)
def load_environments(environment_root_data: Optional[schema.EnvironmentRoot]) -> None:
if not environment_root_data:
return
environments.max_concurrency = environment_root_data.max_concurrency
environments_data = environment_root_data.environments
without_name: bool = False
log = _get_init_logger()
for environment_config in environments_config:
environment = Environment.load(environment_config)
for environment_data in environments_data:
environment = Environment.load(environment_data)
if not environment.name:
if without_name:
raise LisaException("at least two environments has no name")
@ -128,15 +107,15 @@ def load_environments(config: Dict[str, object]) -> None:
if TYPE_CHECKING:
EnvironmentDict = UserDict[str, Environment]
EnvironmentsDict = UserDict[str, Environment]
else:
EnvironmentDict = UserDict
EnvironmentsDict = UserDict
class Environments(EnvironmentDict):
class Environments(EnvironmentsDict):
def __init__(self) -> None:
super().__init__()
self.max_concurrency = 1
self.max_concurrency: int = 1
def __getitem__(self, k: Optional[str] = None) -> Environment:
if k is None:

View file

@ -1,7 +1,6 @@
from __future__ import annotations
import pathlib
import re
from abc import ABC, abstractmethod
from hashlib import sha256
from typing import TYPE_CHECKING, Dict, List, Optional, Type, TypeVar, Union, cast
@ -327,8 +326,6 @@ class CustomScriptBuilder:
It needs some special handling in tool.py, but not much.
"""
_normalize_pattern = re.compile(r"[^\w]|\d")
def __init__(
self,
root_path: pathlib.Path,
@ -370,7 +367,7 @@ class CustomScriptBuilder:
command_identifier = files[0]
# generate a unique name based on file names
command_identifier = self._normalize_pattern.sub("_", command_identifier)
command_identifier = constants.NORMALIZE_PATTERN.sub("_", command_identifier)
hash_source = "".join(files).encode("utf-8")
hash_result = sha256(hash_source)
self.name = f"custom_{command_identifier}_{hash_result.hexdigest()}".lower()

View file

@ -6,7 +6,7 @@ from pathlib import Path
from retry import retry # type: ignore
from lisa.parameter_parser.argparser import parse_args
from lisa.util import env
from lisa.util import constants, env
from lisa.util.logger import get_logger, set_level, set_log_file
@ -27,6 +27,8 @@ def main() -> None:
run_path = create_run_path(local_path)
local_path = local_path.joinpath(run_path)
local_path.mkdir(parents=True)
constants.RUN_ID = run_path.name
env.set_env(env.KEY_RUN_LOCAL_PATH, str(local_path))
env.set_env(env.KEY_RUN_PATH, str(run_path))

View file

@ -5,8 +5,9 @@ import random
from collections import UserDict
from typing import TYPE_CHECKING, Any, Dict, List, Optional, TypeVar, Union, cast
from lisa import schema
from lisa.executable import Tools
from lisa.tool import Echo, Uname
from lisa.tools import Echo, Uname
from lisa.util import constants, env
from lisa.util.exceptions import LisaException
from lisa.util.logger import get_logger
@ -28,12 +29,13 @@ class Node:
"""
id_: passed in by platform, used to associate with a resource in the platform
"""
self.is_default = is_default
self.is_remote = is_remote
self.spec = spec
self.name: str = ""
self.index = index
self.id = id_
self.id_ = id_
self.shell: Shell = LocalShell()
@ -73,35 +75,19 @@ class Node:
self,
address: str = "",
port: int = 22,
publicAddress: str = "",
publicPort: int = 22,
public_address: str = "",
public_port: int = 22,
username: str = "root",
password: str = "",
privateKeyFile: str = "",
private_key_file: str = "",
) -> None:
if self._connection_info is not None:
raise LisaException(
"node is set connection information already, cannot set again"
)
if not address and not publicAddress:
raise LisaException(
"at least one of address and publicAddress need to be set"
)
elif not address:
address = publicAddress
elif not publicAddress:
publicAddress = address
if not port and not publicPort:
raise LisaException("at least one of port and publicPort need to be set")
elif not port:
port = publicPort
elif not publicPort:
publicPort = port
self._connection_info = ConnectionInfo(
publicAddress, publicPort, username, password, privateKeyFile,
public_address, public_port, username, password, private_key_file,
)
self.shell = SshShell(self._connection_info)
self.internal_address = address
@ -224,12 +210,12 @@ class Node:
if TYPE_CHECKING:
NodeDict = UserDict[str, Node]
NodesDict = UserDict[str, Node]
else:
NodeDict = UserDict
NodesDict = UserDict
class Nodes(NodeDict):
class Nodes(NodesDict):
def __init__(self) -> None:
self._default: Optional[Node] = None
self._list: List[Node] = list()
@ -278,8 +264,11 @@ class Nodes(NodeDict):
for node in self._list:
node.close()
def create_by_config(self, config: Dict[str, object]) -> Optional[Node]:
node_type = cast(str, config.get(constants.TYPE))
def from_data(
self, node_data: Union[schema.LocalNode, schema.RemoteNode, schema.NodeSpec]
) -> Optional[Node]:
node_type = node_data.type
node = None
if node_type is None:
raise LisaException("type of node shouldn't be None")
@ -287,9 +276,8 @@ class Nodes(NodeDict):
constants.ENVIRONMENTS_NODES_LOCAL,
constants.ENVIRONMENTS_NODES_REMOTE,
]:
is_default = cast(bool, config.get(constants.IS_DEFAULT, False))
node = Node.create(
len(self._list), node_type=node_type, is_default=is_default
len(self._list), node_type=node_type, is_default=node_data.is_default
)
self._list.append(node)
if node.is_remote:
@ -300,12 +288,13 @@ class Nodes(NodeDict):
constants.ENVIRONMENTS_NODES_REMOTE_PUBLIC_PORT,
constants.ENVIRONMENTS_NODES_REMOTE_USERNAME,
constants.ENVIRONMENTS_NODES_REMOTE_PASSWORD,
constants.ENVIRONMENTS_NODES_REMOTE_PRIVATEKEYFILE,
constants.ENVIRONMENTS_NODES_REMOTE_PRIVATE_KEY_FILE,
]
parameters: Dict[str, Any] = dict()
for key in config:
if key in fields:
parameters[key] = cast(str, config[key])
for field in fields:
value = getattr(node_data, field)
if value is not None:
parameters[field] = value
node.set_connection_info(**parameters)
return node

View file

@ -1,64 +1,24 @@
from argparse import Namespace
from collections import UserDict
from dataclasses import field, make_dataclass
from pathlib import Path
from typing import TYPE_CHECKING, Dict, List, Optional, cast
from typing import Any, List, Optional, Tuple, Type, cast
import yaml
from marshmallow import Schema
from marshmallow import validate as marshmallow_validate
from lisa.schema import validate_config
from lisa import schema
from lisa.platform_ import platforms
from lisa.util import constants
from lisa.util.logger import get_logger
if TYPE_CHECKING:
ConfigDict = UserDict[str, object]
else:
ConfigDict = UserDict
_schema: Optional[Schema] = None
class Config(ConfigDict):
def __init__(
self,
base_path: Optional[Path] = None,
config: Optional[Dict[str, object]] = None,
) -> None:
super().__init__()
if base_path is not None:
self.base_path = base_path
if config is not None:
self._config: Dict[str, object] = config
def validate(self) -> None:
# TODO implement config validation
pass
@property
def extension(self) -> Dict[str, object]:
return self._get_and_cast(constants.EXTENSION)
@property
def environment(self) -> Dict[str, object]:
return self._get_and_cast(constants.ENVIRONMENT)
@property
def platform(self) -> List[Dict[str, object]]:
return cast(
List[Dict[str, object]], self._config.get(constants.PLATFORM, list())
)
@property
def testcase(self) -> Dict[str, object]:
return self._get_and_cast(constants.TESTCASE)
# TODO: This is a hack to get around our data not being
# structured. Since we generally know the type of the data were
# trying to get, this indicates that we need to properly structure
# said data. Doing so correctly will enable us to delete this.
def _get_and_cast(self, name: str) -> Dict[str, object]:
return cast(Dict[str, object], self._config.get(name, dict()))
def parse_to_config(args: Namespace) -> Config:
path = Path(args.config).absolute()
def load(path: Path) -> Any:
"""
load the config without validating it, since some extended schemas are not ready
before the extended modules are imported.
"""
log = get_logger("parser")
log.info(f"load config from: {path}")
@ -68,10 +28,100 @@ def parse_to_config(args: Namespace) -> Config:
with open(path, "r") as file:
data = yaml.safe_load(file)
# load schema
validate_config(data)
log.debug(f"final config data: {data}")
base_path = path.parent
log.debug(f"base path is {base_path}")
return Config(base_path, data)
return data
def validate(data: Any) -> schema.Config:
_load_platform_schema()
global _schema
if not _schema:
_schema = schema.Config.schema() # type:ignore
assert _schema
config = cast(schema.Config, _schema.load(data))
return config
def _set_schema_class(
schema_type: Type[Any], updated_fields: Optional[List[Tuple[str, Any, Any]]] = None
) -> None:
if updated_fields is None:
updated_fields = []
setattr(
schema,
schema_type.__name__,
make_dataclass(
schema_type.__name__, fields=updated_fields, bases=(schema_type,),
),
)
def _load_platform_schema() -> None:
# load platform extensions
platform_fields: List[Tuple[str, Any, Any]] = []
platform_field_names: List[str] = []
# 1. discover extension schemas and construct new field
for platform in platforms.values():
platform_schema = platform.platform_schema
if platform_schema:
platform_type_name = platform.platform_type()
platform_field = (
platform_type_name,
Optional[platform_schema],
field(default=None),
)
platform_fields.append(platform_field)
platform_field_names.append(platform_type_name)
# 2. refresh data class in schema platform and environment
if platform_fields:
# add in platform type
platform_with_type_fields = platform_fields.copy()
platform_field_names.append(constants.PLATFORM_READY)
type_field = (
constants.TYPE,
str,
field(
default=constants.PLATFORM_READY,
metadata=schema.metadata(
required=True,
validate=marshmallow_validate.OneOf(platform_field_names),
),
),
)
platform_with_type_fields.append(type_field)
# refresh platform
_set_schema_class(schema.Platform, platform_with_type_fields)
schema.Platform.supported_types = platform_field_names
# refresh node spec, template, and chain dataclasses
_set_schema_class(schema.NodeSpec, platform_fields)
_set_schema_class(schema.Template, platform_fields)
template_in_config = (
constants.ENVIRONMENTS_TEMPLATE,
Optional[schema.Template],
field(default=None),
)
_set_schema_class(schema.Environment, [template_in_config])
platform_spec_in_config = (
constants.ENVIRONMENTS,
Optional[List[schema.Environment]],
field(default=None),
)
_set_schema_class(schema.EnvironmentRoot, [platform_spec_in_config])
platform_in_config = (
constants.PLATFORM,
List[schema.Platform],
field(default_factory=list),
)
environment_in_config = (
constants.ENVIRONMENT,
Optional[schema.EnvironmentRoot],
field(default=None),
)
_set_schema_class(schema.Config, [platform_in_config, environment_in_config])
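The refresh above hinges on `dataclasses.make_dataclass`; a minimal sketch of the technique, with illustrative names:

```python
from dataclasses import dataclass, field, make_dataclass
from typing import Optional


@dataclass
class Base:
    name: str = ""


# rebuild Base under the same name with an extra optional field, keeping
# Base as a base class; _set_schema_class applies the same trick to the
# schema module's dataclasses.
Extended = make_dataclass(
    "Base", fields=[("azure", Optional[str], field(default=None))], bases=(Base,),
)

instance = Extended(name="n0", azure="x")
assert isinstance(instance, Base)
```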

View file

@ -2,8 +2,10 @@ from __future__ import annotations
from abc import ABC, abstractmethod
from collections import UserDict
from typing import TYPE_CHECKING, Dict, List, Optional, Type, cast
from functools import partial
from typing import TYPE_CHECKING, Any, List, Optional, Type, cast
from lisa import schema
from lisa.util import constants
from lisa.util.exceptions import LisaException
from lisa.util.logger import get_logger
@ -11,43 +13,79 @@ from lisa.util.logger import get_logger
if TYPE_CHECKING:
from lisa.environment import Environment
_get_init_logger = partial(get_logger, "init", "platform")
class Platform(ABC):
def __init__(self) -> None:
self._log = get_logger("platform", self.platform_type())
self.__is_initialized = False
@classmethod
@abstractmethod
def platform_type(cls) -> str:
raise NotImplementedError()
@abstractmethod
def config(self, key: str, value: object) -> None:
@property
def platform_schema(self) -> Optional[Type[Any]]:
return None
@property
def node_schema(self) -> Optional[Type[Any]]:
return None
def _config(self, key: str, value: object) -> None:
pass
def _initialize(self) -> None:
"""
Used to do initialization work.
It's called when the first environment is requested.
"""
pass
@abstractmethod
def _request_environment_internal(self, environment: Environment) -> Environment:
def _request_environment(self, environment: Environment) -> Environment:
raise NotImplementedError()
@abstractmethod
def _delete_environment_internal(self, environment: Environment) -> None:
def _delete_environment(self, environment: Environment) -> None:
raise NotImplementedError()
def config(self, key: str, value: Any) -> None:
if key == constants.CONFIG_CONFIG:
# store the platform-specific config, if there is one.
self._root_config = cast(schema.Platform, value)
self._config(key, value)
def request_environment(self, environment: Environment) -> Environment:
environment = self._request_environment_internal(environment)
if not self.__is_initialized:
self._log.debug("initializing...")
self._initialize()
self.__is_initialized = True
self._log.debug("initialized")
self._log.info(f"requesting environment {environment.name}")
environment = self._request_environment(environment)
environment.is_ready = True
self._log.info(f"requested environment {environment.name}")
return environment
def delete_environment(self, environment: Environment) -> None:
self._log.info(f"environment {environment.name} deleting")
environment.close()
self._delete_environment_internal(environment)
self._delete_environment(environment)
environment.is_ready = False
self._log.info(f"environment {environment.name} deleted")
if TYPE_CHECKING:
PlatformDict = UserDict[str, Platform]
PlatformsDict = UserDict[str, Platform]
else:
PlatformDict = UserDict
PlatformsDict = UserDict
class Platforms(PlatformDict):
class Platforms(PlatformsDict):
def __init__(self) -> None:
super().__init__()
self._default: Optional[Platform] = None
@ -62,7 +100,7 @@ class Platforms(PlatformDict):
self._default = value
def register_platform(self, platform: Type[Platform]) -> None:
platform_type = platform.platform_type().lower()
platform_type = platform.platform_type()
if platforms.get(platform_type) is None:
platforms[platform_type] = platform()
else:
@ -71,33 +109,37 @@ class Platforms(PlatformDict):
)
def initialize_platforms(config: List[Dict[str, object]]) -> None:
if not config:
raise LisaException("cannot find platform")
def load_platforms(platforms_config: List[schema.Platform]) -> None:
log = _get_init_logger()
# we may extend it later to support multiple platforms
platform_count = len(config)
platform_count = len(platforms_config)
if platform_count != 1:
raise LisaException("There must be 1 and only 1 platform")
platform_type = cast(Optional[str], config[0].get(constants.TYPE))
if platform_type is None:
raise LisaException("type of platfrom shouldn't be None")
default_platform: Optional[Platform] = None
for platform_config in platforms_config:
platform_type = platform_config.type
platform = platforms.get(platform_type)
if platform is None:
raise LisaException(f"cannot find platform type '{platform_type}'")
if default_platform is None:
default_platform = platform
log.info(f"activated platform '{platform_type}'")
platform.config(constants.CONFIG_CONFIG, platform_config)
assert default_platform
platforms.default = default_platform
def initialize_platforms() -> None:
for sub_class in Platform.__subclasses__():
platform_class = cast(Type[Platform], sub_class)
platforms.register_platform(platform_class)
log = get_logger("init", "platform")
log = _get_init_logger()
log.debug(
f"registered platforms: " f"[{', '.join([name for name in platforms.keys()])}]"
)
platform = platforms.get(platform_type.lower())
if platform is None:
raise LisaException(f"cannot find platform type '{platform_type}'")
log.info(f"activated platform '{platform_type}'")
platform.config(constants.CONFIG_CONFIG, config[0])
platforms.default = platform
platforms = Platforms()
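With this registration scheme, a new platform only needs to subclass `Platform`; `initialize_platforms()` discovers it through `__subclasses__()`. A minimal sketch (`HypotheticalPlatform` is illustrative, not part of this commit):

```python
from lisa.environment import Environment
from lisa.platform_ import Platform


# subclassing Platform is enough for initialize_platforms() to register it
class HypotheticalPlatform(Platform):
    @classmethod
    def platform_type(cls) -> str:
        return "hypothetical"

    def _request_environment(self, environment: Environment) -> Environment:
        # no real provisioning in this sketch; hand the environment back
        return environment

    def _delete_environment(self, environment: Environment) -> None:
        pass
```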

View file

@ -1,29 +1,474 @@
import logging
from pathlib import Path
from typing import Any, Dict, Optional, cast
from dataclasses import dataclass, field
from dataclasses import fields as dataclass_fields
from typing import Any, Callable, ClassVar, List, Optional, Type, TypeVar, Union
import yaml
from cerberus import Validator # type: ignore
from dataclasses_json import ( # type: ignore
DataClassJsonMixin,
LetterCase,
config,
dataclass_json,
)
from marshmallow import fields, validate
from lisa.util import constants
from lisa.util.exceptions import LisaException
from lisa.util.logger import get_logger
_schema: Optional[Dict[str, object]] = None
"""
Schemas are handled by three components:
1. dataclasses. A built-in class used to define the schema of an instance. The
field() function describes a field.
2. dataclasses_json. The serializer. The config() function customizes this component.
3. marshmallow. The validator. It's wrapped by dataclasses_json; config(mm_field=xxx)
customizes this component.
"""
def validate_config(data: Any) -> None:
v = Validator(_load_schema())
is_success = v.validate(data)
if not is_success:
log = get_logger("init", "schema")
log.lines(level=logging.ERROR, content=v.errors)
raise LisaException("met validation errors, see error log for details")
def metadata(
field_function: Optional[Callable[..., Any]] = None, *args: Any, **kwargs: Any
) -> Any:
"""
shorthand wrapper to keep field metadata declarations brief
"""
if field_function is None:
field_function = fields.Raw
assert field_function
return config(mm_field=field_function(*args, **kwargs))
def _load_schema() -> Dict[str, object]:
global _schema
if not _schema:
schema_path = Path(__file__).parent.joinpath("schema.yml")
with open(schema_path, "r") as f:
_schema = cast(Dict[str, object], yaml.safe_load(f))
return _schema
T = TypeVar("T", bound=DataClassJsonMixin)
class ExtendableSchemaMixin:
def get_extended_schema(
self, schema_type: Type[T], schema_name: str = constants.TYPE
) -> T:
assert issubclass(
schema_type, DataClassJsonMixin
), "schema_type must annotate from DataClassJsonMixin"
assert hasattr(self, schema_name), f"cannot find attr '{schema_name}'"
customized_config = getattr(self, schema_name)
if not isinstance(customized_config, schema_type):
raise LisaException(
f"schema type mismatch, expected type: {schema_type}"
f"data: {customized_config}"
)
return customized_config
@dataclass_json(letter_case=LetterCase.CAMEL)
@dataclass
class Strategy:
"""
for a simple merge, this part is optional.
operations include:
overwrite: the default behavior. add items that don't exist, and replace those that do.
remove: remove the specified path entirely.
add: add items that don't exist, without replacing those that do.
"""
path: str = field(default="", metadata=metadata(required=True))
operation: str = field(
default=constants.OPERATION_OVERWRITE,
metadata=metadata(
required=True,
validate=validate.OneOf(
[
constants.OPERATION_ADD,
constants.OPERATION_OVERWRITE,
constants.OPERATION_REMOVE,
]
),
),
)
@dataclass_json(letter_case=LetterCase.CAMEL)
@dataclass
class Parent:
"""
share configurations for similar runs.
"""
path: str = field(default="", metadata=metadata(required=True))
strategy: List[Strategy] = field(
default_factory=list, metadata=metadata(required=True),
)
@dataclass_json(letter_case=LetterCase.CAMEL)
@dataclass
class Extension:
"""
extended classes can be put in folders and included here. it doesn't matter how
those files are organized; lisa loads them by their inheritance relationships. if
there is any conflict on a type name, an error message is raised.
"""
paths: List[str] = field(default_factory=list, metadata=metadata(required=True))
@dataclass_json(letter_case=LetterCase.CAMEL)
@dataclass
class Variable:
"""
used to support variables in other fields.
duplicate items are overwritten one by one.
if a variable is not defined here, LISA can fail early and ask to have it checked.
the file path is relative to where the LISA command starts.
"""
# If it's a secret, it will be removed from logs and other output information.
# secret files also need to be removed after the test.
# it's strongly discouraged to put secrets in configurations directly.
is_secret: bool = False
# continue to support v2 format. it's simple.
file: str = field(
default="",
metadata=metadata(validate=validate.Regexp(r"[\w\W]+[.](xml|yml|yaml)$")),
)
name: str = field(default="")
value: str = field(default="")
def __post_init__(self, *args: Any, **kwargs: Any) -> None:
if self.file and (self.name or self.value):
raise LisaException(
f"file cannot be specified with name or value"
f"file: '{self.file}'"
f"name: '{self.name}'"
f"value: '{self.value}'"
)
@dataclass_json(letter_case=LetterCase.CAMEL)
@dataclass
class ArtifactLocation:
type: str = field(
default="", metadata=metadata(required=True, validate=validate.OneOf([])),
)
path: str = field(default="", metadata=metadata(required=True))
@dataclass_json(letter_case=LetterCase.CAMEL)
@dataclass
class Artifact:
# name is optional. artifacts can be referred to by name or index.
name: str = ""
type: str = field(
default="", metadata=metadata(required=True, validate=validate.OneOf([])),
)
locations: List[ArtifactLocation] = field(default_factory=list)
@dataclass_json(letter_case=LetterCase.CAMEL)
@dataclass
class Notifier:
"""
sends test progress and results to any place wanted.
detailed types are defined in the notifier itself; allowed items are handled in code.
"""
type: str = field(
default="", metadata=metadata(required=True, validate=validate.OneOf([])),
)
@dataclass_json(letter_case=LetterCase.CAMEL)
@dataclass
class LocalNode:
type: str = field(
default=constants.ENVIRONMENTS_NODES_LOCAL,
metadata=metadata(
required=True,
validate=validate.OneOf([constants.ENVIRONMENTS_NODES_LOCAL]),
),
)
name: str = ""
is_default: bool = field(default=False)
@dataclass_json(letter_case=LetterCase.CAMEL)
@dataclass
class RemoteNode:
type: str = field(
default=constants.ENVIRONMENTS_NODES_REMOTE,
metadata=metadata(
required=True,
validate=validate.OneOf([constants.ENVIRONMENTS_NODES_REMOTE]),
),
)
name: str = ""
is_default: bool = field(default=False)
address: str = ""
port: int = field(
default=1, metadata=metadata(validate=validate.Range(min=1, max=65535))
)
public_address: str = ""
public_port: int = field(
default=1,
metadata=metadata(
data_key="publicPort", validate=validate.Range(min=1, max=65535)
),
)
username: str = field(default="", metadata=metadata(required=True))
password: str = ""
private_key_file: str = ""
def __post_init__(self, *args: Any, **kwargs: Any) -> None:
if not self.address and not self.public_address:
raise LisaException(
"at least one of address and publicAddress need to be set"
)
elif not self.address:
self.address = self.public_address
elif not self.public_address:
self.public_address = self.address
if not self.port and not self.public_port:
raise LisaException("at least one of port and publicPort need to be set")
elif not self.port:
self.port = self.public_port
elif not self.public_port:
self.public_port = self.port
if not self.password and not self.private_key_file:
raise LisaException(
"at least one of password and privateKeyFile need to be set"
)
@dataclass_json(letter_case=LetterCase.CAMEL)
@dataclass
class NodeSpec(ExtendableSchemaMixin):
type: str = field(
default=constants.ENVIRONMENTS_NODES_SPEC,
metadata=metadata(
required=True, validate=validate.OneOf([constants.ENVIRONMENTS_NODES_SPEC]),
),
)
name: str = ""
is_default: bool = field(default=False)
# optional, if there is only one artifact.
artifact: str = field(default="")
cpu_count: int = field(
default=1,
metadata=metadata(data_key="cpuCount", validate=validate.Range(min=1)),
)
memory_gb: int = field(
default=1,
metadata=metadata(data_key="memoryGb", validate=validate.Range(min=1)),
)
gpu_count: int = field(
default=0,
metadata=metadata(data_key="gpuCount", validate=validate.Range(min=0)),
)
@dataclass_json(letter_case=LetterCase.CAMEL)
@dataclass
class Template(NodeSpec):
node_count: int = field(
default=1,
metadata=metadata(data_key="nodeCount", validate=validate.Range(min=1)),
)
@dataclass_json(letter_case=LetterCase.CAMEL)
@dataclass
class Environment:
name: str = field(default="")
# the environment spec may not be fully supported by each platform.
# If so, there is a warning message.
# The environment spec can be forced to apply, as the error is loud.
topology: str = field(
default=constants.ENVIRONMENTS_SUBNET,
metadata=metadata(validate=validate.OneOf([constants.ENVIRONMENTS_SUBNET])),
)
# template and nodes conflict; only one of them should be set.
# the template prevents duplicated content for a large number of nodes.
template: Optional[Template] = field(default=None)
# field_name is a config level variable, so use config directly.
_nodes_raw: Optional[List[Any]] = field(
default=None, metadata=metadata(data_key=constants.NODES),
)
def __post_init__(self, *args: Any, **kwargs: Any) -> None:
if self.template and self._nodes_raw:
raise LisaException("cannot specify tempate and nodes both")
if self._nodes_raw:
# dataclasses_json cannot handle Union well, so handle it manually
self.nodes: List[Union[NodeSpec, LocalNode, RemoteNode]] = []
for node_raw in self._nodes_raw:
if node_raw[constants.TYPE] == constants.ENVIRONMENTS_NODES_LOCAL:
node: Union[
NodeSpec, LocalNode, RemoteNode
] = LocalNode.schema().load( # type:ignore
node_raw
)
elif node_raw[constants.TYPE] == constants.ENVIRONMENTS_NODES_REMOTE:
node = RemoteNode.schema().load(node_raw) # type:ignore
elif node_raw[constants.TYPE] == constants.ENVIRONMENTS_NODES_SPEC:
node = NodeSpec.schema().load(node_raw) # type:ignore
else:
raise LisaException(
f"unknown config type '{type(config)}': {config}"
)
self.nodes.append(node)
@dataclass_json(letter_case=LetterCase.CAMEL)
@dataclass
class EnvironmentRoot:
max_concurrency: int = field(
default=1,
metadata=metadata(data_key="maxConcurrency", validate=validate.Range(min=1)),
)
warn_as_error: bool = field(default=False)
environments: List[Environment] = field(default_factory=list)
@dataclass_json(letter_case=LetterCase.CAMEL)
@dataclass
class Platform(ExtendableSchemaMixin):
type: str = field(
default=constants.PLATFORM_READY,
metadata=metadata(
required=True, validate=validate.OneOf([constants.PLATFORM_READY]),
),
)
supported_types: ClassVar[List[str]] = [constants.PLATFORM_READY]
def __post_init__(self, *args: Any, **kwargs: Any) -> None:
platform_fields = dataclass_fields(self)
# get type field to analyze if mismatch type info is set.
for platform_field in platform_fields:
value = getattr(self, platform_field.name)
if (
value is not None
and platform_field.name in self.supported_types
and platform_field.name != self.type
):
raise LisaException(
f"platform type '{self.type}' and extension "
f"'{platform_field.name}' mismatch"
)
@dataclass_json(letter_case=LetterCase.CAMEL)
@dataclass
class Criteria:
"""
all rules in the same criteria are AND conditions.
we may support richer conditions later.
match cases by name pattern.
"""
name: Optional[str] = None
area: Optional[str] = None
category: Optional[str] = None
# the schema is complex to convert, so it needs a manual overwrite.
priority: Optional[Union[int, List[int]]] = field(default=None)
# tag is a simple way to include test cases within the same topic.
tag: Optional[Union[str, List[str]]] = field(default=None)
def __post_init__(self, *args: Any, **kwargs: Any) -> None:
if isinstance(self.priority, int):
if self.priority < 0 or self.priority > 3:
raise LisaException(
f"priority range should be 0 to 3, but '{self.priority}'"
)
elif isinstance(self.priority, list):
for priority in self.priority:
if priority < 0 or priority > 3:
raise LisaException(
f"priority range should be 0 to 3, but '{priority}'"
)
elif self.priority is not None:
raise LisaException(
f"priority must be integer, but '{self.priority}' "
f"is '{type(self.priority)}'"
)
if isinstance(self.tag, list):
for tag in self.tag:
assert isinstance(
tag, str
), f"tag must be str, but '{tag}' is '{type(tag)}'"
elif not isinstance(self.tag, str):
if self.tag is not None:
raise LisaException(
f"tag must be str, but '{self.tag}' is '{type(self.tag)}'"
)
@dataclass_json(letter_case=LetterCase.CAMEL)
@dataclass
class TestCase:
"""
rules apply in order on the previous selection.
The running order of test cases is not guaranteed until dependencies are set.
"""
name: str = ""
criteria: Optional[Criteria] = None
# specifies whether this rule selects or drops test cases. if a case is force
# included or excluded, it won't be affected by following select actions. And it
# fails if force rules conflict.
select_action: str = field(
default=constants.TESTCASE_SELECT_ACTION_INCLUDE,
metadata=config(
mm_field=fields.String(
validate=validate.OneOf(
[
# none means this action part doesn't include or exclude cases
constants.TESTCASE_SELECT_ACTION_NONE,
constants.TESTCASE_SELECT_ACTION_INCLUDE,
constants.TESTCASE_SELECT_ACTION_FORCE_INCLUDE,
constants.TESTCASE_SELECT_ACTION_EXCLUDE,
constants.TESTCASE_SELECT_ACTION_FORCE_EXCLUDE,
]
)
),
),
)
# if it's false, the test cases are disabled in the current run.
# it is used to control test cases dynamically from the command line.
enable: bool = field(default=True)
# run this group of test cases several times
# default is 1
times: int = field(default=1, metadata=metadata(validate=validate.Range(min=1)))
# times to retry on failure. Default is 0, no retry.
retry: int = field(default=0, metadata=metadata(validate=validate.Range(min=0)))
# each case with this rule will be run in a new environment.
use_new_environment: bool = False
# Once it's set, a failed test result will be rewritten to success.
# it is used to work around some cases temporarily; don't overuse it.
# default is false
ignore_failure: bool = False
# case should run on a specified environment
environment: str = ""
@dataclass_json(letter_case=LetterCase.CAMEL)
@dataclass
class Config:
# run name prefix to help group results; it is also put in the title.
name: str = "not_named"
parent: Optional[List[Parent]] = field(default=None)
extension: Optional[Extension] = field(default=None)
variable: Optional[List[Variable]] = field(default=None)
artifact: Optional[List[Artifact]] = field(default=None)
environment: Optional[EnvironmentRoot] = field(default=None)
notifier: Optional[List[Notifier]] = field(default=None)
platform: List[Platform] = field(default_factory=list)
testcase: List[TestCase] = field(default_factory=list)
def __post_init__(self, *args: Any, **kwargs: Any) -> None:
if not self.platform:
self.platform = [Platform(constants.PLATFORM_READY)]
if not self.testcase:
self.testcase = [TestCase("test", Criteria(area="demo"))]

View file

@ -1,302 +0,0 @@
# run name prefix to help grouping results and put it in title.
name:
type: string
# share configurations for similar runs.
parent:
type: list
schema:
type: dict
schema:
path:
type: string
required: true
# for simple merge, this part is optional.
# operations include:
# overwrite: default behavior. add non-exist items and replace exist.
# remove: remove specified path totally
# add: add non-exist, not replace exist.
strategy:
type: list
schema:
type: dict
schema:
path:
type: string
operation:
type: string
allowed:
- overwrite
- add
- remove
# add extended classes can be put in folders and include here. it doesn't matter how
# those files are organized, lisa loads by their inherits relationship. if there is any
# conflict on type name, there should be an error message.
extension:
type: dict
schema:
paths:
type: list
schema:
type: string
# it uses to support variables in other fields.
# duplicate items will be overwritten one by one.
# if a variable is not defined here, LISA can fail earlier to ask check it.
# file path is relative to LISA command starts.
variable:
type: list
schema:
type: dict
schema:
# If it's secret, it will be removed from log and other output information.
# secret files also need to be removed after test
# it's not recommended highly to put secret in configurations directly.
isSecret:
type: boolean
file:
type: string
excludes:
- name
- value
# continue to support v2 format. it's simple.
regex: "[\\w\\W]*[.](xml|yml|yaml)$"
# redefine parameters in file to reuse configurations
name:
type: string
excludes: file
value:
type: string
excludes: file
# supports multiple artifacts in future.
artifact:
type: list
schema:
type: dict
schema:
# name is optional. artifacts can be referred by name or index.
name:
type: string
type:
type: string
required: true
allowed:
- vhd
locations:
type: list
required: true
schema:
type: dict
schema:
type:
type: string
allowed:
- http
path:
type: string
environment:
type: dict
schema:
maxConcurrency:
type: number
min: 1
required: false
warnAsError:
type: boolean
environments:
type: list
schema:
type: dict
schema:
name:
type: string
# the environment spec may not be fully supported by each platform.
# If so, there is a warning message.
# Environment spec can be forced to apply, as error is loud.
topology:
type: string
allowed:
- subnet
# template and nodes conflicts, they should have only one.
# it uses to prevent duplicate content for big amount nodes.
template:
type: dict
excludes: nodes
schema:
nodeCount:
type: integer
min: 1
# optional, if there is only one artifact.
artifact:
type: string
vcpu:
type: integer
min: 1
memoryGB:
type: integer
min: 1
gpuCount:
type: integer
min: 0
nodes:
type: list
excludes: template
schema:
type: dict
oneof_schema:
# local and remote are node type, and don't need platform to handle
- type:
type: string
allowed:
- local
name:
type: string
# If test suite doesn't specify where to run a case,
# it should be run on default node.
isDefault:
type: boolean
- type:
type: string
allowed:
- remote
name:
type: string
isDefault:
type: boolean
address:
type: string
dependencies:
type: remote
port:
type: integer
min: 1
max: 65535
publicAddress:
type: string
publicPort:
type: integer
min: 1
max: 65535
username:
type: string
password:
type: string
privateKeyFile:
type: string
- type:
type: string
allowed:
- spec
name:
type: string
isDefault:
type: boolean
artifact:
type: string
vcpu:
type: integer
min: 1
memoryGB:
type: integer
min: 1
gpuCount:
type: integer
min: 0
# it sends test progress and results to any place wanted.
notifier:
type: list
schema:
type: dict
schema:
# detail types are defined in notifier itself, allowed items are handled in code.
type:
type: string
allowed:
- junit
- html
- database
# TODO: temp allow unknown
allow_unknown: true
# it's examples for all platforms, but currently should leave single one only.
platform:
type: list
schema:
type: dict
schema:
type:
type: string
allowed:
# It uses to pure existing environment.
# run test cases on existing machine.
- ready
- azure
# rules apply ordered on previous selection.
# The order of test cases running is not guaranteed, until it set dependencies.
testcase:
type: list
schema:
type: dict
schema:
# all rules in same criteria are AND condition.
# we may support richer conditions later.
# match case by name pattern
criteria:
type: dict
schema:
name:
type: string
area:
type: string
category:
type: string
priority:
oneof:
- type: integer
min: 0
max: 3
- type: list
schema:
type: integer
# tag is a simple way to include test cases within same topic.
tag:
oneof:
- type: list
schema:
type: string
- type: string
# specify use this rule to select or drop test cases. if it's forced include or
# exclude, it won't be effect by following select actions. And it fails if
# there are force rules conflict.
selectAction:
type: string
allowed:
# none means this action part doesn't include or exclude cases
- none
- include
- exclude
- forceInclude
- forceExclude
# if it's false, the test cases are disable in current run.
# it uses to control test cases dynamic form command line.
enable:
type: boolean
name:
type: string
# run this group of test cases several times
# default is 1
times:
type: integer
min: 1
# retry times if fails. Default is 0, not to retry.
retry:
type: integer
min: 0
# each case with this rule will be run in a new environment.
useNewEnvironment:
type: boolean
# Once it's set, failed test result will be rewrite to success
# it uses to work around some cases temporarily, don't overuse it.
# default is false
ignoreFailure:
type: boolean
# case should run on a specified environment
environment:
type: string

View file

View file

@ -0,0 +1,156 @@
import logging
import os
from dataclasses import dataclass, field
from typing import Any, Dict, List, Optional, Type
from azure.identity import DefaultAzureCredential # type: ignore
from azure.mgmt.resource import ( # type: ignore
ResourceManagementClient,
SubscriptionClient,
)
from dataclasses_json import LetterCase, dataclass_json # type:ignore
from marshmallow import validate
from lisa import schema
from lisa.environment import Environment
from lisa.platform_ import Platform
from lisa.util import constants
from lisa.util.exceptions import LisaException
AZURE = "azure"
@dataclass_json(letter_case=LetterCase.CAMEL)
@dataclass
class AzurePlatformSchema:
service_principal_tenant_id: str = field(
default="",
metadata=schema.metadata(
data_key="servicePrincipalTenantId",
validate=validate.Regexp(constants.GUID_REGEXP),
),
)
service_principal_client_id: str = field(
default="",
metadata=schema.metadata(
data_key="servicePrincipalClientId",
validate=validate.Regexp(constants.GUID_REGEXP),
),
)
service_principal_key: str = field(default="")
subscription_id: str = field(
default="",
metadata=schema.metadata(
data_key="subscriptionId", validate=validate.Regexp(constants.GUID_REGEXP),
),
)
resource_group_name: str = field(default="")
location: str = field(default="westus2")
log_level: str = field(
default=logging.getLevelName(logging.WARN),
metadata=schema.metadata(
data_key="logLevel",
validate=validate.OneOf(
[
logging.getLevelName(logging.ERROR),
logging.getLevelName(logging.WARN),
logging.getLevelName(logging.INFO),
logging.getLevelName(logging.DEBUG),
]
),
),
)
dry_run: bool = False
@dataclass_json(letter_case=LetterCase.CAMEL)
@dataclass
class AzureNodeSchema:
vm_size: str = field(default="")
class AzurePlatform(Platform):
def __init__(self) -> None:
super().__init__()
self._credential: DefaultAzureCredential = None
self._environment_counter = 0
@classmethod
def platform_type(cls) -> str:
return AZURE
@property
def platform_schema(self) -> Optional[Type[Any]]:
return AzurePlatformSchema
@property
def node_schema(self) -> Optional[Type[Any]]:
return AzureNodeSchema
def _request_environment(self, environment: Environment) -> Environment:
assert self._rm_client
assert environment.data, "env data cannot be None"
env_data: schema.Environment = environment.data
if self._azure_config.resource_group_name:
resource_group_name = self._azure_config.resource_group_name
self._log.info(f"reusing resource group: {resource_group_name}")
else:
normalized_run_name = constants.NORMALIZE_PATTERN.sub(
"_", constants.RUN_NAME
)
resource_group_name = f"{normalized_run_name}_e{self._enviornment_counter}"
self._enviornment_counter += 1
self._log.info(f"creating resource group: {resource_group_name}")
if self._azure_config.dry_run:
self._log.info(f"dry_run: {self._azure_config.dry_run}")
else:
resource_group = self._rm_client.resource_groups.create_or_update(
resource_group_name, {"location": self._azure_config.location}
)
self._log.info(f"created resource group is {resource_group}")
nodes_parameters: List[Dict[str, Any]] = []
for node in env_data.nodes:
assert isinstance(node, schema.NodeSpec)
node_parameter: Dict[str, Any] = dict()
node_parameter["vcpu"] = node.cpu_count
nodes_parameters.append(node_parameter)
self._rm_client.deployments.validate(nodes_parameters)
return environment
def _delete_environment(self, environment: Environment) -> None:
pass
def _initialize(self) -> None:
# set needed environment variables for authentication
self._azure_config = self._root_config.get_extended_schema(AzurePlatformSchema)
assert self._azure_config, "azure config cannot be empty"
# set azure log to the configured level (WARN by default)
logging.getLogger("azure").setLevel(self._azure_config.log_level)
os.environ["AZURE_TENANT_ID"] = self._azure_config.service_principal_tenant_id
os.environ["AZURE_CLIENT_ID"] = self._azure_config.service_principal_client_id
os.environ["AZURE_CLIENT_SECRET"] = self._azure_config.service_principal_key
self._credential = DefaultAzureCredential()
self._sub_client = SubscriptionClient(self._credential)
self._subscription_id = self._azure_config.subscription_id
subscription = self._sub_client.subscriptions.get(self._subscription_id)
if not subscription:
raise LisaException(
f"cannot find subscription id: '{self._subscription_id}'"
)
self._log.info(f"connected to subscription: '{subscription.display_name}'")
self._rm_client = ResourceManagementClient(
credential=self._credential, subscription_id=self._subscription_id
)
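To illustrate how the `data_key` metadata maps camelCase runbook keys onto `AzurePlatformSchema` (the import path is assumed and the all-zero GUIDs are dummy placeholders that satisfy `constants.GUID_REGEXP`):

```python
from lisa.sut_orchestrator.azure import AzurePlatformSchema  # module path assumed

# load a platform section the way the runbook loader would
azure_config = AzurePlatformSchema.schema().load(
    {
        "servicePrincipalTenantId": "00000000-0000-0000-0000-000000000000",
        "servicePrincipalClientId": "00000000-0000-0000-0000-000000000000",
        "subscriptionId": "00000000-0000-0000-0000-000000000000",
        "location": "westus2",
    }
)
assert azure_config.location == "westus2"
```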

View file

@ -12,9 +12,9 @@ class ReadyPlatform(Platform):
# ready platform has no config
pass
def _request_environment_internal(self, environment: Environment) -> Environment:
def _request_environment(self, environment: Environment) -> Environment:
return environment
def _delete_environment_internal(self, environment: Environment) -> None:
def _delete_environment(self, environment: Environment) -> None:
# ready platform doesn't support deleting environments
pass

View file

@ -45,11 +45,8 @@ class LISARunner(Action):
test_suite_cases.append(test_result)
test_suites[test_case_data.metadata.suite] = test_suite_cases
platform_type = self.platform.platform_type()
# request environment
self._log.info(f"platform {platform_type} environment requesting")
environment = environments.default
self._log.info(f"platform {platform_type} environment requested")
environment = self.platform.request_environment(environments.default)
self._log.info(f"start running {len(test_results)} cases")
for test_suite_data in test_suites:
@ -73,14 +70,7 @@ class LISARunner(Action):
self._log.info(f" {key.name}\t: {result_count_dict.get(key, 0)}")
# delete environment after run
self._log.info(
f"platform {platform_type} environment {environment.name} deleting"
)
self.platform.delete_environment(environment)
self._log.info(
f"platform {platform_type} environment {environment.name} deleted"
)
self.set_status(ActionStatus.SUCCESS)
async def stop(self) -> None:

View file

@ -1,7 +1,7 @@
from typing import Any, List
from unittest import TestCase
from lisa.schema import validate_config
from lisa.parameter_parser.config import validate
from lisa.testselector import select_testcases
from lisa.testsuite import (
TestCaseData,
@ -155,10 +155,10 @@ class SelectorTestCase(TestCase):
def _select_and_check(
self, config: List[Any], expected_descriptions: List[str]
) -> List[TestCaseData]:
root_config = {"testcase": config}
validate_config(root_config)
root_config = {constants.TESTCASE: config}
config_instance = validate(root_config)
case_metadatas = self._generate_metadata()
selected = select_testcases(config, case_metadatas)
selected = select_testcases(config_instance.testcase, case_metadatas)
self.assertListEqual(
expected_descriptions, [case.description for case in selected]
)

View file

@ -1,18 +1,8 @@
import re
from functools import partial
from typing import (
Any,
Callable,
Dict,
List,
Mapping,
Optional,
Pattern,
Set,
Union,
cast,
)
from typing import Callable, Dict, List, Mapping, Optional, Pattern, Set, Union, cast
from lisa import schema
from lisa.testsuite import TestCaseData, TestCaseMetadata, get_cases_metadata
from lisa.util import constants
from lisa.util.exceptions import LisaException
@ -22,7 +12,8 @@ _get_logger = partial(get_logger, "init", "selector")
def select_testcases(
filters: Any = None, init_cases: Optional[List[TestCaseMetadata]] = None
filters: Optional[List[schema.TestCase]] = None,
init_cases: Optional[List[TestCaseMetadata]] = None,
) -> List[TestCaseData]:
"""
select test cases based on filters. If filters are None, return all cases.
@ -40,9 +31,7 @@ def select_testcases(
force_included: Set[str] = set()
force_excluded: Set[str] = set()
for filter in filters:
filter = cast(Dict[str, Any], filter)
enabled = filter.get(constants.ENABLE, True)
if enabled:
if filter.enable:
selected = _apply_filter(
filter, selected, force_included, force_excluded, full_list
)
@ -106,19 +95,19 @@ def _match_cases(
def _apply_settings(
applied_case_data: TestCaseData, config: Dict[str, Any], action: str
applied_case_data: TestCaseData, schema_data: schema.TestCase, action: str
) -> None:
field_mapping = {
"times": constants.TESTCASE_TIMES,
"retry": constants.TESTCASE_RETRY,
"use_new_environmnet": constants.TESTCASE_USE_NEW_ENVIRONMENT,
"ignore_failure": constants.TESTCASE_IGNORE_FAILURE,
"environment": constants.ENVIRONMENT,
}
for (attr_name, schema_name) in field_mapping.items():
schema_value = config.get(schema_name)
if schema_value:
setattr(applied_case_data, attr_name, schema_value)
fields = [
constants.TESTCASE_TIMES,
constants.TESTCASE_RETRY,
constants.TESTCASE_USE_NEW_ENVIRONMENT,
constants.TESTCASE_IGNORE_FAILURE,
constants.ENVIRONMENT,
]
for field in fields:
data = getattr(schema_data, field)
if data is not None:
setattr(applied_case_data, field, data)
# use default value from selector
applied_case_data.select_action = action
@ -130,12 +119,12 @@ def _force_check(
force_expected_set: Set[str],
force_exclusive_set: Set[str],
temp_force_exclusive_set: Set[str],
config: Any,
schema_data: schema.TestCase,
) -> bool:
is_skip = False
if name in force_exclusive_set:
if is_force:
raise LisaException(f"case {name} has force conflict on {config}")
raise LisaException(f"case {name} has force conflict on {schema_data}")
else:
temp_force_exclusive_set.add(name)
is_skip = True
@ -145,7 +134,7 @@ def _force_check(
def _apply_filter(
config: Dict[str, Any],
schema_data: schema.TestCase,
current_selected: Dict[str, TestCaseData],
force_included: Set[str],
force_excluded: Set[str],
@ -155,46 +144,49 @@ def _apply_filter(
log = _get_logger()
# initialize criteria
patterns: List[Callable[[Union[TestCaseData, TestCaseMetadata]], bool]] = []
criterias_config: Dict[str, Any] = config.get(constants.TESTCASE_CRITERIA, dict())
for config_key in criterias_config:
if config_key in [
criterias_data = schema_data.criteria
assert criterias_data, "test case criteria cannot be None"
criterias_data_dict = criterias_data.__dict__
for data_key, data_value in criterias_data_dict.items():
if data_value is None:
continue
if data_key in [
constants.NAME,
constants.TESTCASE_CRITERIA_AREA,
constants.TESTCASE_CRITERIA_CATEGORY,
]:
pattern = cast(str, criterias_config[config_key])
pattern = cast(str, criterias_data_dict[data_key])
expression = re.compile(pattern)
patterns.append(
partial(_match_string, pattern=expression, attr_name=config_key)
partial(_match_string, pattern=expression, attr_name=data_key)
)
elif data_key == constants.TESTCASE_CRITERIA_PRIORITY:
priority_pattern = cast(
Union[int, List[int]], criterias_data_dict[data_key]
)
elif config_key == constants.TESTCASE_CRITERIA_PRIORITY:
priority_pattern = cast(Union[int, List[int]], criterias_config[config_key])
patterns.append(partial(_match_priority, pattern=priority_pattern))
elif config_key == constants.TESTCASE_CRITERIA_TAG:
tag_pattern = cast(Union[str, List[str]], criterias_config[config_key])
elif data_key == constants.TESTCASE_CRITERIA_TAG:
tag_pattern = cast(Union[str, List[str]], criterias_data_dict[data_key])
patterns.append(partial(_match_tag, criteria_tags=tag_pattern))
else:
raise LisaException(f"unknown criteria key: {config_key}")
raise LisaException(f"unknown criteria key: {data_key}")
# match by select Action:
changed_cases: Dict[str, TestCaseData] = dict()
action = config.get(
constants.TESTCASE_SELECT_ACTION, constants.TESTCASE_SELECT_ACTION_INCLUDE
)
is_force = action in [
is_force = schema_data.select_action in [
constants.TESTCASE_SELECT_ACTION_FORCE_INCLUDE,
constants.TESTCASE_SELECT_ACTION_FORCE_EXCLUDE,
]
is_update_setting = action in [
is_update_setting = schema_data.select_action in [
constants.TESTCASE_SELECT_ACTION_NONE,
constants.TESTCASE_SELECT_ACTION_INCLUDE,
constants.TESTCASE_SELECT_ACTION_FORCE_INCLUDE,
]
temp_force_set: Set[str] = set()
if action is constants.TESTCASE_SELECT_ACTION_NONE:
if schema_data.select_action is constants.TESTCASE_SELECT_ACTION_NONE:
# Just apply settings on test cases
changed_cases = _match_cases(current_selected, patterns)
elif action in [
elif schema_data.select_action in [
constants.TESTCASE_SELECT_ACTION_INCLUDE,
constants.TESTCASE_SELECT_ACTION_FORCE_INCLUDE,
]:
@ -202,7 +194,12 @@ def _apply_filter(
changed_cases = _match_cases(full_list, patterns)
for name, new_case_data in changed_cases.items():
is_skip = _force_check(
name, is_force, force_included, force_excluded, temp_force_set, config
name,
is_force,
force_included,
force_excluded,
temp_force_set,
schema_data,
)
if is_skip:
continue
@ -211,31 +208,37 @@ def _apply_filter(
case_data = current_selected.get(name, new_case_data)
current_selected[name] = case_data
changed_cases[name] = case_data
elif action in [
elif schema_data.select_action in [
constants.TESTCASE_SELECT_ACTION_EXCLUDE,
constants.TESTCASE_SELECT_ACTION_FORCE_EXCLUDE,
]:
changed_cases = _match_cases(current_selected, patterns)
for name in changed_cases:
is_skip = _force_check(
name, is_force, force_excluded, force_included, temp_force_set, config
name,
is_force,
force_excluded,
force_included,
temp_force_set,
schema_data,
)
if is_skip:
continue
del current_selected[name]
else:
raise LisaException(f"unknown selectAction: '{action}'")
raise LisaException(f"unknown selectAction: '{schema_data.select_action}'")
# the changed set cannot be modified inside its own for loop, so update it here.
for name in temp_force_set:
del changed_cases[name]
if is_update_setting:
for case_data in changed_cases.values():
_apply_settings(case_data, config, action)
_apply_settings(case_data, schema_data, schema_data.select_action)
log.debug(
f"applying action: [{action}] on case [{changed_cases.keys()}], "
f"config: {config}, loaded criteria count: {len(patterns)}"
f"applying action: [{schema_data.select_action}] on "
f"case [{changed_cases.keys()}], "
f"data: {schema_data}, loaded criteria count: {len(patterns)}"
)
return current_selected
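The selector changes above replace `config.get(...)` probing with typed attribute access on `schema.TestCase`. As a rough, hypothetical sketch of what such a dataclass-based schema looks like (field names inferred from the constants in this commit, not the exact `schema.py` definition):

```python
from dataclasses import dataclass
from typing import List, Optional, Union

from dataclasses_json import dataclass_json


@dataclass_json
@dataclass
class Criteria:
    # every criterion is optional; None means "not filtered on"
    name: Optional[str] = None
    area: Optional[str] = None
    category: Optional[str] = None
    priority: Optional[Union[int, List[int]]] = None
    tag: Optional[Union[str, List[str]]] = None


@dataclass_json
@dataclass
class TestCase:
    criteria: Optional[Criteria] = None
    select_action: str = "none"
    times: int = 1
    retry: int = 0
    use_new_environment: bool = False
    ignore_failure: bool = False


# a runbook fragment deserializes into typed fields
case = TestCase.from_dict(
    {"criteria": {"priority": [0, 1]}, "select_action": "include"}
)
assert case.criteria is not None
assert case.select_action == "include"
```

With `None` defaults, `_apply_filter` can walk `criteria.__dict__` and skip unset values, as the new code does, instead of guessing which keys a raw dict contains.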


@ -103,7 +103,7 @@ class TestCaseData:
self.select_action: str = ""
self.times: int = 1
self.retry: int = 0
self.use_new_environmnet: bool = False
self.use_new_environment: bool = False
self.ignore_failure: bool = False
self.environment_name: str = ""


@ -1,81 +1,71 @@
import re
# config types
CONFIG_CONFIG = "config"
CONFIG_PLATFORM = "platform"
CONFIG_TEST_CASES = "testcases"
RUN_ID = ""
RUN_NAME = ""
# path related
PATH_REMOTE_ROOT = "lisa_working"
PATH_TOOL = "tool"
# patterns
GUID_REGEXP = re.compile(r"^[0-9a-f]{8}-([0-9a-f]{4}-){3}[0-9a-f]{12}$")
NORMALIZE_PATTERN = re.compile(r"[^\w\d]")
# list types
LIST = "list"
LIST_CASE = "case"
# common
NODE = "node"
NODES = "nodes"
NAME = "name"
VALUE = "value"
TYPE = "type"
PATH = "path"
PATHS = "paths"
STRATEGY = "strategy"
FILE = "file"
HTTP = "http"
IS_DEFAULT = "isDefault"
WARN_AS_ERROR = "warnAsError"
ENABLE = "enable"
# by level
PARENT = "parent"
OPERATION = "operation"
OPERATION_REMOVE = "remove"
OPERATION_ADD = "add"
OPERATION_OVERWRITE = "overwrite"
# topologies
ENVIRONMENTS_SUBNET = "subnet"
EXTENSION = "extension"
VARIABLE = "variable"
VARIABLE_ISSECRET = "isSecret"
ARTIFACT = "artifact"
ARTIFACT_TYPE_VHD = "vhd"
ARTIFACT_LOCATIONS = "locations"
ARTIFACT_LOCATIONS_TYPE_HTTP = "http"
ENVIRONMENT = "environment"
ENVIRONMENT_MAX_CONCURRENCY = "maxConcurrency"
ENVIRONMENTS = "environments"
ENVIRONMENTS_TOPOLOGY = "topology"
ENVIRONMENTS_TEMPLATE = "template"
ENVIRONMENTS_TEMPLATE_NODE_COUNT = "nodeCount"
ENVIRONMENTS_NODES = "nodes"
ENVIRONMENTS_NODES_SPEC = "spec"
ENVIRONMENTS_NODES_REMOTE = "remote"
ENVIRONMENTS_NODES_LOCAL = "local"
ENVIRONMENTS_NODES_REMOTE_ADDRESS = "address"
ENVIRONMENTS_NODES_REMOTE_PORT = "port"
ENVIRONMENTS_NODES_REMOTE_PUBLIC_ADDRESS = "publicAddress"
ENVIRONMENTS_NODES_REMOTE_PUBLIC_PORT = "publicPort"
ENVIRONMENTS_NODES_REMOTE_PUBLIC_ADDRESS = "public_address"
ENVIRONMENTS_NODES_REMOTE_PUBLIC_PORT = "public_port"
ENVIRONMENTS_NODES_REMOTE_USERNAME = "username"
ENVIRONMENTS_NODES_REMOTE_PASSWORD = "password"
ENVIRONMENTS_NODES_REMOTE_PRIVATEKEYFILE = "privateKeyFile"
NOTIFIER = "notifier"
ENVIRONMENTS_NODES_REMOTE_PRIVATE_KEY_FILE = "private_key_file"
PLATFORM = "platform"
PLATFORM_AZURE = "azure"
PLATFORM_READY = "ready"
# test case fields
TESTCASE = "testcase"
TESTCASE_CRITERIA = "criteria"
TESTCASE_CRITERIA_AREA = "area"
TESTCASE_CRITERIA_CATEGORY = "category"
TESTCASE_CRITERIA_PRIORITY = "priority"
TESTCASE_CRITERIA_TAG = "tag"
TESTCASE_SELECT_ACTION = "selectAction"
TESTCASE_SELECT_ACTION = "select_action"
TESTCASE_SELECT_ACTION_NONE = "none"
TESTCASE_SELECT_ACTION_INCLUDE = "include"
TESTCASE_SELECT_ACTION_EXCLUDE = "exclude"
@ -84,5 +74,5 @@ TESTCASE_SELECT_ACTION_FORCE_EXCLUDE = "forceExclude"
TESTCASE_TIMES = "times"
TESTCASE_RETRY = "retry"
TESTCASE_USE_NEW_ENVIRONMENT = "useNewEnvironment"
TESTCASE_IGNORE_FAILURE = "ignoreFailure"
TESTCASE_USE_NEW_ENVIRONMENT = "use_new_environment"
TESTCASE_IGNORE_FAILURE = "ignore_failure"
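Net effect of the renames above: runbook keys move to snake_case, while the select-action values themselves keep their camelCase spellings (`TESTCASE_SELECT_ACTION_FORCE_EXCLUDE` is still `"forceExclude"`). A hypothetical filter fragment under the new keys:

```python
# keys follow the new snake_case constants; values are unchanged
testcase_filter = {
    "select_action": "forceExclude",  # was: "selectAction"
    "use_new_environment": True,      # was: "useNewEnvironment"
    "ignore_failure": False,          # was: "ignoreFailure"
}
```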


@ -56,7 +56,7 @@ class Process:
) -> None:
# the shell can be LocalShell or SshShell
self._shell = shell
self._id = id_
self._id_ = id_
self._is_linux = is_linux
self._running: bool = False
self._log = get_logger("cmd", id_, parent=parent_logger)


@ -17,23 +17,23 @@ class ConnectionInfo:
port: int = 22,
username: str = "root",
password: Optional[str] = "",
privatekey_file: str = "",
private_key_file: str = "",
) -> None:
self.address = address
self.port = port
self.username = username
self.password = password
self.privatekey_file = privatekey_file
self.private_key_file = private_key_file
if not self.password and not self.privatekey_file:
if not self.password and not self.private_key_file:
raise LisaException(
"at least one of password and privateKeyFile need to be set"
)
elif not self.privatekey_file:
elif not self.private_key_file:
self._use_password = True
else:
if not Path(self.privatekey_file).exists():
raise FileNotFoundError(self.privatekey_file)
if not Path(self.private_key_file).exists():
raise FileNotFoundError(self.private_key_file)
self.password = None
self._use_password = False
@ -54,7 +54,7 @@ class SshShell:
port=self._connection_info.port,
username=self._connection_info.username,
password=self._connection_info.password,
private_key_file=self._connection_info.privatekey_file,
private_key_file=self._connection_info.private_key_file,
missing_host_key=spur.ssh.MissingHostKey.accept,
)
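A hypothetical use of the renamed argument, assuming `ConnectionInfo` is imported from `lisa.util.shell` (the import path is an assumption). Password auth is used here so the sketch does not trip the key-file existence check shown above:

```python
from lisa.util.shell import ConnectionInfo  # assumed module path

info = ConnectionInfo(
    address="192.0.2.10",  # placeholder TEST-NET address
    port=22,
    username="lisa",
    password="example-password",
)
```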

poetry.lock (generated)

@ -26,13 +26,71 @@ description = "Classes Without Boilerplate"
name = "attrs"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
version = "19.3.0"
version = "20.1.0"
[package.extras]
azure-pipelines = ["coverage", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "zope.interface", "pytest-azurepipelines"]
dev = ["coverage", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "zope.interface", "sphinx", "pre-commit"]
docs = ["sphinx", "zope.interface"]
tests = ["coverage", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "zope.interface"]
dev = ["coverage (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "zope.interface", "sphinx", "sphinx-rtd-theme", "pre-commit"]
docs = ["sphinx", "sphinx-rtd-theme", "zope.interface"]
tests = ["coverage (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "zope.interface"]
[[package]]
category = "main"
description = "Microsoft Azure Client Library for Python (Common)"
name = "azure-common"
optional = false
python-versions = "*"
version = "1.1.25"
[[package]]
category = "main"
description = "Microsoft Azure Core Library for Python"
name = "azure-core"
optional = false
python-versions = "*"
version = "1.8.0"
[package.dependencies]
requests = ">=2.18.4"
six = ">=1.6"
[[package]]
category = "main"
description = "Microsoft Azure Identity Library for Python"
name = "azure-identity"
optional = false
python-versions = "*"
version = "1.4.0"
[package.dependencies]
azure-core = ">=1.0.0,<2.0.0"
cryptography = ">=2.1.4"
msal = ">=1.3.0,<2.0.0"
msal-extensions = ">=0.2.2,<0.3.0"
six = ">=1.6"
[[package]]
category = "main"
description = "Microsoft Azure Management Core Library for Python"
name = "azure-mgmt-core"
optional = false
python-versions = "*"
version = "1.2.0"
[package.dependencies]
azure-core = ">=1.7.0.dev,<2.0.0"
[[package]]
category = "main"
description = "Microsoft Azure Resource Management Client Library for Python"
name = "azure-mgmt-resource"
optional = false
python-versions = "*"
version = "15.0.0b1"
[package.dependencies]
azure-common = ">=1.1,<2.0"
azure-mgmt-core = ">=1.0.0,<2.0.0"
msrest = ">=0.5.0"
[[package]]
category = "main"
@ -72,14 +130,11 @@ d = ["aiohttp (>=3.3.2)", "aiohttp-cors"]
[[package]]
category = "main"
description = "Lightweight, extensible schema and data validation tool for Python dictionaries."
name = "cerberus"
description = "Python package for providing Mozilla's CA Bundle."
name = "certifi"
optional = false
python-versions = ">=2.7"
version = "1.3.2"
[package.dependencies]
setuptools = "*"
python-versions = "*"
version = "2020.6.20"
[[package]]
category = "main"
@ -92,6 +147,14 @@ version = "1.14.2"
[package.dependencies]
pycparser = "*"
[[package]]
category = "main"
description = "Universal encoding detector for Python 2 and 3"
name = "chardet"
optional = false
python-versions = "*"
version = "3.0.4"
[[package]]
category = "dev"
description = "Composable command line interface toolkit"
@ -120,6 +183,23 @@ pep8test = ["black", "flake8", "flake8-import-order", "pep8-naming"]
ssh = ["bcrypt (>=3.1.5)"]
test = ["pytest (>=3.6.0,<3.9.0 || >3.9.0,<3.9.1 || >3.9.1,<3.9.2 || >3.9.2)", "pretend", "iso8601", "pytz", "hypothesis (>=1.11.4,<3.79.2 || >3.79.2)"]
[[package]]
category = "main"
description = "Easily serialize dataclasses to and from JSON"
name = "dataclasses-json"
optional = false
python-versions = ">=3.6"
version = "0.5.2"
[package.dependencies]
marshmallow = ">=3.3.0,<4.0.0"
marshmallow-enum = ">=1.5.1,<2.0.0"
stringcase = "1.2.0"
typing-inspect = ">=0.4.0"
[package.extras]
dev = ["pytest", "ipython", "mypy (>=0.710)", "hypothesis", "portray", "flake8", "simplejson"]
[[package]]
category = "main"
description = "Decorators for Humans"
@ -171,18 +251,15 @@ description = "flake8 plugin that integrates isort ."
name = "flake8-isort"
optional = false
python-versions = "*"
version = "3.0.1"
version = "4.0.0"
[package.dependencies]
flake8 = ">=3.2.1,<4"
isort = ">=4.3.5,<6"
testfixtures = ">=6.8.0,<7"
[package.dependencies.isort]
extras = ["pyproject"]
version = ">=4.3.5,<5"
[package.extras]
test = ["pytest (>=4.0.2,<6)"]
test = ["pytest (>=4.0.2,<6)", "toml"]
[[package]]
category = "main"
@ -198,19 +275,37 @@ asttokens = ">=2,<3"
[package.extras]
dev = ["mypy (0.750)", "pylint (2.3.1)", "yapf (0.20.2)", "tox (>=3.0.0)", "pydocstyle (>=2.1.1,<3)", "coverage (>=4.5.1,<5)", "docutils (>=0.14,<1)", "pygments (>=2.2.0,<3)", "dpcontracts (0.6.0)", "tabulate (>=0.8.7,<1)", "py-cpuinfo (>=5.0.0,<6)"]
[[package]]
category = "main"
description = "Internationalized Domain Names in Applications (IDNA)"
name = "idna"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
version = "2.10"
[[package]]
category = "main"
description = "An ISO 8601 date/time/duration parser and formatter"
name = "isodate"
optional = false
python-versions = "*"
version = "0.6.0"
[package.dependencies]
six = "*"
[[package]]
category = "dev"
description = "A Python utility / library to sort Python imports."
name = "isort"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
version = "4.3.21"
python-versions = ">=3.6,<4.0"
version = "5.4.2"
[package.extras]
pipfile = ["pipreqs", "requirementslib"]
pyproject = ["toml"]
requirements = ["pipreqs", "pip-api"]
xdg_home = ["appdirs (>=1.4.0)"]
colors = ["colorama (>=0.4.3,<0.5.0)"]
pipfile_deprecated_finder = ["pipreqs", "requirementslib", "tomlkit (>=0.5.3)"]
requirements_deprecated_finder = ["pipreqs", "pip-api"]
[[package]]
category = "dev"
@ -227,6 +322,31 @@ parso = ">=0.7.0,<0.8.0"
qa = ["flake8 (3.7.9)"]
testing = ["Django (<3.1)", "colorama", "docopt", "pytest (>=3.9.0,<5.0.0)"]
[[package]]
category = "main"
description = "A lightweight library for converting complex datatypes to and from native Python datatypes."
name = "marshmallow"
optional = false
python-versions = ">=3.5"
version = "3.7.1"
[package.extras]
dev = ["pytest", "pytz", "simplejson", "mypy (0.782)", "flake8 (3.8.3)", "flake8-bugbear (20.1.4)", "pre-commit (>=2.4,<3.0)", "tox"]
docs = ["sphinx (3.1.2)", "sphinx-issues (1.2.0)", "alabaster (0.7.12)", "sphinx-version-warning (1.1.2)", "autodocsumm (0.1.13)"]
lint = ["mypy (0.782)", "flake8 (3.8.3)", "flake8-bugbear (20.1.4)", "pre-commit (>=2.4,<3.0)"]
tests = ["pytest", "pytz", "simplejson"]
[[package]]
category = "main"
description = "Enum field for Marshmallow"
name = "marshmallow-enum"
optional = false
python-versions = "*"
version = "1.5.1"
[package.dependencies]
marshmallow = ">=2.0.0"
[[package]]
category = "dev"
description = "McCabe checker, plugin for flake8"
@ -235,6 +355,54 @@ optional = false
python-versions = "*"
version = "0.6.1"
[[package]]
category = "main"
description = "The Microsoft Authentication Library (MSAL) for Python library enables your app to access the Microsoft Cloud by supporting authentication of users with Microsoft Azure Active Directory accounts (AAD) and Microsoft Accounts (MSA) using industry standard OAuth2 and OpenID Connect."
name = "msal"
optional = false
python-versions = "*"
version = "1.4.3"
[package.dependencies]
requests = ">=2.0.0,<3"
PyJWT = {version = ">=1.0.0,<2", extras = ["crypto"]}
[[package]]
category = "main"
description = ""
name = "msal-extensions"
optional = false
python-versions = "*"
version = "0.2.2"
[package.dependencies]
msal = ">=0.4.1,<2.0.0"
[[package.dependencies.portalocker]]
markers = "platform_system != \"Windows\""
version = ">=1.0,<2.0"
[[package.dependencies.portalocker]]
markers = "platform_system == \"Windows\""
version = ">=1.6,<2.0"
[[package]]
category = "main"
description = "AutoRest swagger generator Python client runtime."
name = "msrest"
optional = false
python-versions = "*"
version = "0.6.18"
[package.dependencies]
certifi = ">=2017.4.17"
isodate = ">=0.6.0"
requests = ">=2.16,<3.0"
requests-oauthlib = ">=0.5.0"
[package.extras]
async = ["aiohttp (>=3.0)", "aiodns"]
[[package]]
category = "dev"
description = "Optional static typing for Python"
@ -252,13 +420,26 @@ typing-extensions = ">=3.7.4"
dmypy = ["psutil (>=4.0)"]
[[package]]
category = "dev"
category = "main"
description = "Experimental type system extensions for programs checked with the mypy typechecker."
name = "mypy-extensions"
optional = false
python-versions = "*"
version = "0.4.3"
[[package]]
category = "main"
description = "A generic, spec-compliant, thorough implementation of the OAuth request-signing logic"
name = "oauthlib"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
version = "3.1.0"
[package.extras]
rsa = ["cryptography"]
signals = ["blinker"]
signedtoken = ["cryptography", "pyjwt (>=1.0.0)"]
[[package]]
category = "main"
description = "SSH2 protocol library"
@ -308,6 +489,21 @@ version = "0.13.1"
[package.extras]
dev = ["pre-commit", "tox"]
[[package]]
category = "main"
description = "Wraps the portalocker recipe for easy usage"
name = "portalocker"
optional = false
python-versions = "*"
version = "1.7.1"
[package.dependencies]
pywin32 = {version = "!=226", markers = "platform_system == \"Windows\""}
[package.extras]
docs = ["sphinx (>=1.7.1)"]
tests = ["pytest (>=4.6.9)", "pytest-cov (>=2.8.1)", "sphinx (>=1.8.5)", "pytest-flake8 (>=1.0.5)"]
[[package]]
category = "main"
description = "Cross-platform lib for process and system monitoring in Python."
@ -351,6 +547,22 @@ optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
version = "2.2.0"
[[package]]
category = "main"
description = "JSON Web Token implementation in Python"
name = "pyjwt"
optional = false
python-versions = "*"
version = "1.7.1"
[package.dependencies]
cryptography = {version = ">=1.4", optional = true, markers = "extra == \"crypto\""}
[package.extras]
crypto = ["cryptography (>=1.4)"]
flake8 = ["flake8", "flake8-import-order", "pep8-naming"]
test = ["pytest (>=4.0.1,<5.0.0)", "pytest-cov (>=2.6.0,<3.0.0)", "pytest-runner (>=4.2,<5.0.0)"]
[[package]]
category = "dev"
description = "Black plugin for the Python Language Server"
@ -419,7 +631,7 @@ python-versions = "*"
version = "0.3.4"
[package.dependencies]
ujson = "<=1.35"
ujson = {version = "<=1.35", markers = "platform_system != \"Windows\""}
[package.extras]
test = ["versioneer", "pylint", "pycodestyle", "pyflakes", "pytest", "mock", "pytest-cov", "coverage"]
@ -436,7 +648,7 @@ version = "0.34.1"
jedi = ">=0.17.0,<0.18.0"
pluggy = "*"
python-jsonrpc-server = ">=0.3.2"
ujson = "<=1.35"
ujson = {version = "<=1.35", markers = "platform_system != \"Windows\""}
[package.extras]
all = ["autopep8", "flake8 (>=3.8.0)", "mccabe (>=0.6.0,<0.7.0)", "pycodestyle (>=2.6.0,<2.7.0)", "pydocstyle (>=2.0.0)", "pyflakes (>=2.2.0,<2.3.0)", "pylint", "rope (>=0.10.5)", "yapf"]
@ -451,6 +663,14 @@ rope = ["rope (>0.10.5)"]
test = ["versioneer", "pylint", "pytest", "mock", "pytest-cov", "coverage", "numpy", "pandas", "matplotlib", "pyqt5"]
yapf = ["yapf"]
[[package]]
category = "main"
description = "Python for Window Extensions"
name = "pywin32"
optional = false
python-versions = "*"
version = "228"
[[package]]
category = "main"
description = "YAML parser and emitter for Python"
@ -467,6 +687,39 @@ optional = false
python-versions = "*"
version = "2020.7.14"
[[package]]
category = "main"
description = "Python HTTP for Humans."
name = "requests"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
version = "2.24.0"
[package.dependencies]
certifi = ">=2017.4.17"
chardet = ">=3.0.2,<4"
idna = ">=2.5,<3"
urllib3 = ">=1.21.1,<1.25.0 || >1.25.0,<1.25.1 || >1.25.1,<1.26"
[package.extras]
security = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)"]
socks = ["PySocks (>=1.5.6,<1.5.7 || >1.5.7)", "win-inet-pton"]
[[package]]
category = "main"
description = "OAuthlib authentication support for Requests."
name = "requests-oauthlib"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
version = "1.3.0"
[package.dependencies]
oauthlib = ">=3.0.0"
requests = ">=2.0.0"
[package.extras]
rsa = ["oauthlib (>=3.0.0)"]
[[package]]
category = "main"
description = "Easy to use retry decorator."
@ -526,6 +779,14 @@ typing_extensions = ">=3.6.2.1"
[package.extras]
dev = ["mypy (0.620)", "pylint (1.8.2)", "yapf (0.20.2)", "tox (>=3.0.0)", "coverage (>=4.5.1,<5)", "pydocstyle (>=2.1.1,<3)"]
[[package]]
category = "main"
description = "String case converter."
name = "stringcase"
optional = false
python-versions = "*"
version = "1.2.0"
[[package]]
category = "main"
description = "Wraps tempfile to give you pathlib.Path."
@ -573,20 +834,43 @@ description = "Backported and Experimental Type Hints for Python 3.5+"
name = "typing-extensions"
optional = false
python-versions = "*"
version = "3.7.4.2"
version = "3.7.4.3"
[[package]]
category = "main"
description = "Runtime inspection utilities for typing module."
name = "typing-inspect"
optional = false
python-versions = "*"
version = "0.6.0"
[package.dependencies]
mypy-extensions = ">=0.3.0"
typing-extensions = ">=3.7.4"
[[package]]
category = "dev"
description = "Ultra fast JSON encoder and decoder for Python"
marker = "platform_system != \"Windows\""
name = "ujson"
optional = false
python-versions = "*"
version = "1.35"
[[package]]
category = "main"
description = "HTTP library with thread-safe connection pooling, file post, and more."
name = "urllib3"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4"
version = "1.25.10"
[package.extras]
brotli = ["brotlipy (>=0.6.0)"]
secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "pyOpenSSL (>=0.14)", "ipaddress"]
socks = ["PySocks (>=1.5.6,<1.5.7 || >1.5.7,<2.0)"]
[metadata]
content-hash = "335202fc201bde2e36935c60a57af87ade36948faeafe381675b0f7a93980407"
lock-version = "1.0"
content-hash = "e6acb4c0b78ec5fe460271a163aba570ba742d4522fd11e6376e806a18958bef"
python-versions = "^3.8"
[metadata.files]
@ -599,8 +883,28 @@ asttokens = [
{file = "asttokens-2.0.4.tar.gz", hash = "sha256:a42e57e28f2ac1c85ed9b1f84109401427e5c63c04f61d15b8842b027eec5128"},
]
attrs = [
{file = "attrs-19.3.0-py2.py3-none-any.whl", hash = "sha256:08a96c641c3a74e44eb59afb61a24f2cb9f4d7188748e76ba4bb5edfa3cb7d1c"},
{file = "attrs-19.3.0.tar.gz", hash = "sha256:f7b7ce16570fe9965acd6d30101a28f62fb4a7f9e926b3bbc9b61f8b04247e72"},
{file = "attrs-20.1.0-py2.py3-none-any.whl", hash = "sha256:2867b7b9f8326499ab5b0e2d12801fa5c98842d2cbd22b35112ae04bf85b4dff"},
{file = "attrs-20.1.0.tar.gz", hash = "sha256:0ef97238856430dcf9228e07f316aefc17e8939fc8507e18c6501b761ef1a42a"},
]
azure-common = [
{file = "azure-common-1.1.25.zip", hash = "sha256:ce0f1013e6d0e9faebaf3188cc069f4892fc60a6ec552e3f817c1a2f92835054"},
{file = "azure_common-1.1.25-py2.py3-none-any.whl", hash = "sha256:fd02e4256dc9cdd2d4422bc795bdca2ef302f7a86148b154fbf4ea1f09da400a"},
]
azure-core = [
{file = "azure-core-1.8.0.zip", hash = "sha256:c89bbdcdc13ad45fe57d775ed87b15baf6d0b039a1ecd0a1bc91d2f713cb1f08"},
{file = "azure_core-1.8.0-py2.py3-none-any.whl", hash = "sha256:84bff2b05ce989942e7ca3a13237441fbd8ff6855aaf2979b2bc94b74a02be5f"},
]
azure-identity = [
{file = "azure-identity-1.4.0.zip", hash = "sha256:820e1f3e21f90d36063239c6cb7ca9a6bb644cb120a6b1ead3081cafdf6ceaf8"},
{file = "azure_identity-1.4.0-py2.py3-none-any.whl", hash = "sha256:92ccea6c6ac7724d186cb73422d1ad8f525202dce2bdc17f35c695948fadf222"},
]
azure-mgmt-core = [
{file = "azure-mgmt-core-1.2.0.zip", hash = "sha256:8fe3b59446438f27e34f7b24ea692a982034d9e734617ca1320eedeee1939998"},
{file = "azure_mgmt_core-1.2.0-py2.py3-none-any.whl", hash = "sha256:6966226111e92dff26d984aa1c76f227ce0e8b2069c45c72cfb67f160c452444"},
]
azure-mgmt-resource = [
{file = "azure-mgmt-resource-15.0.0b1.zip", hash = "sha256:e7d2c514f1ce14ccd6ddb75398625b4784a784eb7de052b10ac446438959795e"},
{file = "azure_mgmt_resource-15.0.0b1-py2.py3-none-any.whl", hash = "sha256:e294d22c42da23a94cb00998ab91d96b293efedc24427a88fdafd5ed70997abf"},
]
bcrypt = [
{file = "bcrypt-3.2.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:c95d4cbebffafcdd28bd28bb4e25b31c50f6da605c81ffd9ad8a3d1b2ab7b1b6"},
@ -615,8 +919,9 @@ black = [
{file = "black-19.10b0-py36-none-any.whl", hash = "sha256:1b30e59be925fafc1ee4565e5e08abef6b03fe455102883820fe5ee2e4734e0b"},
{file = "black-19.10b0.tar.gz", hash = "sha256:c2edb73a08e9e0e6f65a0e6af18b059b8b1cdd5bef997d7a0b181df93dc81539"},
]
cerberus = [
{file = "Cerberus-1.3.2.tar.gz", hash = "sha256:302e6694f206dd85cb63f13fd5025b31ab6d38c99c50c6d769f8fa0b0f299589"},
certifi = [
{file = "certifi-2020.6.20-py2.py3-none-any.whl", hash = "sha256:8fc0819f1f30ba15bdb34cceffb9ef04d99f420f68eb75d901e9560b8749fc41"},
{file = "certifi-2020.6.20.tar.gz", hash = "sha256:5930595817496dd21bb8dc35dad090f1c2cd0adfaf21204bf6732ca5d8ee34d3"},
]
cffi = [
{file = "cffi-1.14.2-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:da9d3c506f43e220336433dffe643fbfa40096d408cb9b7f2477892f369d5f82"},
@ -648,6 +953,10 @@ cffi = [
{file = "cffi-1.14.2-cp38-cp38-win_amd64.whl", hash = "sha256:12a453e03124069b6896107ee133ae3ab04c624bb10683e1ed1c1663df17c13c"},
{file = "cffi-1.14.2.tar.gz", hash = "sha256:ae8f34d50af2c2154035984b8b5fc5d9ed63f32fe615646ab435b05b132ca91b"},
]
chardet = [
{file = "chardet-3.0.4-py2.py3-none-any.whl", hash = "sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691"},
{file = "chardet-3.0.4.tar.gz", hash = "sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae"},
]
click = [
{file = "click-7.1.2-py2.py3-none-any.whl", hash = "sha256:dacca89f4bfadd5de3d7489b7c8a566eee0d3676333fbb50030263894c38c0dc"},
{file = "click-7.1.2.tar.gz", hash = "sha256:d2b5255c7c6349bc1bd1e59e08cd12acbbd63ce649f2588755783aa94dfb6b1a"},
@ -673,6 +982,10 @@ cryptography = [
{file = "cryptography-3.0-cp38-cp38-win_amd64.whl", hash = "sha256:bea0b0468f89cdea625bb3f692cd7a4222d80a6bdafd6fb923963f2b9da0e15f"},
{file = "cryptography-3.0.tar.gz", hash = "sha256:8e924dbc025206e97756e8903039662aa58aa9ba357d8e1d8fc29e3092322053"},
]
dataclasses-json = [
{file = "dataclasses-json-0.5.2.tar.gz", hash = "sha256:56ec931959ede74b5dedf65cf20772e6a79764d20c404794cce0111c88c085ff"},
{file = "dataclasses_json-0.5.2-py3-none-any.whl", hash = "sha256:b746c48d9d8e884e2a0ffa59c6220a1b21f94d4f9f12c839da0a8a0efd36dc19"},
]
decorator = [
{file = "decorator-4.4.2-py2.py3-none-any.whl", hash = "sha256:41fa54c2a0cc4ba648be4fd43cff00aedf5b9465c9bf18d64325bc225f08f760"},
{file = "decorator-4.4.2.tar.gz", hash = "sha256:e3a62f0520172440ca0dcc823749319382e377f37f140a0b99ef45fecb84bfe7"},
@ -689,24 +1002,52 @@ flake8-bugbear = [
{file = "flake8_bugbear-20.1.4-py36.py37.py38-none-any.whl", hash = "sha256:a3ddc03ec28ba2296fc6f89444d1c946a6b76460f859795b35b77d4920a51b63"},
]
flake8-isort = [
{file = "flake8-isort-3.0.1.tar.gz", hash = "sha256:5d976da513cc390232ad5a9bb54aee8a092466a15f442d91dfc525834bee727a"},
{file = "flake8_isort-3.0.1-py2.py3-none-any.whl", hash = "sha256:df1dd6dd73f6a8b128c9c783356627231783cccc82c13c6dc343d1a5a491699b"},
{file = "flake8-isort-4.0.0.tar.gz", hash = "sha256:2b91300f4f1926b396c2c90185844eb1a3d5ec39ea6138832d119da0a208f4d9"},
{file = "flake8_isort-4.0.0-py2.py3-none-any.whl", hash = "sha256:729cd6ef9ba3659512dee337687c05d79c78e1215fdf921ed67e5fe46cce2f3c"},
]
icontract = [
{file = "icontract-2.3.4.tar.gz", hash = "sha256:5e45f7fcf957375163d63ef9b34a0413a15024f35029fd4b1f2ec21d8463879f"},
]
idna = [
{file = "idna-2.10-py2.py3-none-any.whl", hash = "sha256:b97d804b1e9b523befed77c48dacec60e6dcb0b5391d57af6a65a312a90648c0"},
{file = "idna-2.10.tar.gz", hash = "sha256:b307872f855b18632ce0c21c5e45be78c0ea7ae4c15c828c20788b26921eb3f6"},
]
isodate = [
{file = "isodate-0.6.0-py2.py3-none-any.whl", hash = "sha256:aa4d33c06640f5352aca96e4b81afd8ab3b47337cc12089822d6f322ac772c81"},
{file = "isodate-0.6.0.tar.gz", hash = "sha256:2e364a3d5759479cdb2d37cce6b9376ea504db2ff90252a2e5b7cc89cc9ff2d8"},
]
isort = [
{file = "isort-4.3.21-py2.py3-none-any.whl", hash = "sha256:6e811fcb295968434526407adb8796944f1988c5b65e8139058f2014cbe100fd"},
{file = "isort-4.3.21.tar.gz", hash = "sha256:54da7e92468955c4fceacd0c86bd0ec997b0e1ee80d97f67c35a78b719dccab1"},
{file = "isort-5.4.2-py3-none-any.whl", hash = "sha256:60a1b97e33f61243d12647aaaa3e6cc6778f5eb9f42997650f1cc975b6008750"},
{file = "isort-5.4.2.tar.gz", hash = "sha256:d488ba1c5a2db721669cc180180d5acf84ebdc5af7827f7aaeaa75f73cf0e2b8"},
]
jedi = [
{file = "jedi-0.17.2-py2.py3-none-any.whl", hash = "sha256:98cc583fa0f2f8304968199b01b6b4b94f469a1f4a74c1560506ca2a211378b5"},
{file = "jedi-0.17.2.tar.gz", hash = "sha256:86ed7d9b750603e4ba582ea8edc678657fb4007894a12bcf6f4bb97892f31d20"},
]
marshmallow = [
{file = "marshmallow-3.7.1-py2.py3-none-any.whl", hash = "sha256:67bf4cae9d3275b3fc74bd7ff88a7c98ee8c57c94b251a67b031dc293ecc4b76"},
{file = "marshmallow-3.7.1.tar.gz", hash = "sha256:a2a5eefb4b75a3b43f05be1cca0b6686adf56af7465c3ca629e5ad8d1e1fe13d"},
]
marshmallow-enum = [
{file = "marshmallow-enum-1.5.1.tar.gz", hash = "sha256:38e697e11f45a8e64b4a1e664000897c659b60aa57bfa18d44e226a9920b6e58"},
{file = "marshmallow_enum-1.5.1-py2.py3-none-any.whl", hash = "sha256:57161ab3dbfde4f57adeb12090f39592e992b9c86d206d02f6bd03ebec60f072"},
]
mccabe = [
{file = "mccabe-0.6.1-py2.py3-none-any.whl", hash = "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42"},
{file = "mccabe-0.6.1.tar.gz", hash = "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"},
]
msal = [
{file = "msal-1.4.3-py2.py3-none-any.whl", hash = "sha256:82c0ca1103f4a040f3fa5325bfd6fb6c8273fbd1d6f7c1ea92bbc94fcc360c46"},
{file = "msal-1.4.3.tar.gz", hash = "sha256:51b8e8e0d918d9b4813f006324e7c4e21eb76268dd4c1a06d811a3475ad4ac57"},
]
msal-extensions = [
{file = "msal-extensions-0.2.2.tar.gz", hash = "sha256:31414753c484679bb3b6c6401623eb4c3ccab630af215f2f78c1d5c4f8e1d1a9"},
{file = "msal_extensions-0.2.2-py2.py3-none-any.whl", hash = "sha256:f092246787145ec96d6c3c9f7bedfb837830fe8a79b56180e531fbf28b8de532"},
]
msrest = [
{file = "msrest-0.6.18-py2.py3-none-any.whl", hash = "sha256:4993023011663b4273f15432fab75cc747dfa0bca1816d8122a7d1f9fdd9288d"},
{file = "msrest-0.6.18.tar.gz", hash = "sha256:5f4ef9b8cc207d93978b1a58f055179686b9f30a5e28041872db97a4a1c49b96"},
]
mypy = [
{file = "mypy-0.782-cp35-cp35m-macosx_10_6_x86_64.whl", hash = "sha256:2c6cde8aa3426c1682d35190b59b71f661237d74b053822ea3d748e2c9578a7c"},
{file = "mypy-0.782-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:9c7a9a7ceb2871ba4bac1cf7217a7dd9ccd44c27c2950edbc6dc08530f32ad4e"},
@ -727,6 +1068,10 @@ mypy-extensions = [
{file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"},
{file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"},
]
oauthlib = [
{file = "oauthlib-3.1.0-py2.py3-none-any.whl", hash = "sha256:df884cd6cbe20e32633f1db1072e9356f53638e4361bef4e8b03c9127c9328ea"},
{file = "oauthlib-3.1.0.tar.gz", hash = "sha256:bee41cc35fcca6e988463cacc3bcb8a96224f470ca547e697b604cc697b2f889"},
]
paramiko = [
{file = "paramiko-2.7.1-py2.py3-none-any.whl", hash = "sha256:9c980875fa4d2cb751604664e9a2d0f69096643f5be4db1b99599fe114a97b2f"},
{file = "paramiko-2.7.1.tar.gz", hash = "sha256:920492895db8013f6cc0179293147f830b8c7b21fdfc839b6bad760c27459d9f"},
@ -743,6 +1088,10 @@ pluggy = [
{file = "pluggy-0.13.1-py2.py3-none-any.whl", hash = "sha256:966c145cd83c96502c3c3868f50408687b38434af77734af1e9ca461a4081d2d"},
{file = "pluggy-0.13.1.tar.gz", hash = "sha256:15b2acde666561e1298d71b523007ed7364de07029219b604cf808bfa1c765b0"},
]
portalocker = [
{file = "portalocker-1.7.1-py2.py3-none-any.whl", hash = "sha256:34cb36c618d88bcd9079beb36dcdc1848a3e3d92ac4eac59055bdeafc39f9d4a"},
{file = "portalocker-1.7.1.tar.gz", hash = "sha256:6d6f5de5a3e68c4dd65a98ec1babb26d28ccc5e770e07b672d65d5a35e4b2d8a"},
]
psutil = [
{file = "psutil-5.7.2-cp27-none-win32.whl", hash = "sha256:f2018461733b23f308c298653c8903d32aaad7873d25e1d228765e91ae42c3f2"},
{file = "psutil-5.7.2-cp27-none-win_amd64.whl", hash = "sha256:66c18ca7680a31bf16ee22b1d21b6397869dda8059dbdb57d9f27efa6615f195"},
@ -772,6 +1121,10 @@ pyflakes = [
{file = "pyflakes-2.2.0-py2.py3-none-any.whl", hash = "sha256:0d94e0e05a19e57a99444b6ddcf9a6eb2e5c68d3ca1e98e90707af8152c90a92"},
{file = "pyflakes-2.2.0.tar.gz", hash = "sha256:35b2d75ee967ea93b55750aa9edbbf72813e06a66ba54438df2cfac9e3c27fc8"},
]
pyjwt = [
{file = "PyJWT-1.7.1-py2.py3-none-any.whl", hash = "sha256:5c6eca3c2940464d106b99ba83b00c6add741c9becaec087fb7ccdefea71350e"},
{file = "PyJWT-1.7.1.tar.gz", hash = "sha256:8d59a976fb773f3e6a39c85636357c4f0e242707394cadadd9814f5cbaa20e96"},
]
pyls-black = [
{file = "pyls-black-0.4.6.tar.gz", hash = "sha256:33700e5ed605636ea7ba39188a1362d2f8602f7301f8f2b8544773886f965663"},
{file = "pyls_black-0.4.6-py3-none-any.whl", hash = "sha256:8f5fb8fed503588c10435d2d48e2c3751437f1bdb8116134b05a4591c4899940"},
@ -809,6 +1162,20 @@ python-language-server = [
{file = "python-language-server-0.34.1.tar.gz", hash = "sha256:b96ff466b5aa24e212493de863899298f229a9e250e7353972563c7f2495d23d"},
{file = "python_language_server-0.34.1-py3-none-any.whl", hash = "sha256:47dd678c261f8fc8af16ce738a637e49e0597ad5e4af10b76f4144ab2d96f247"},
]
pywin32 = [
{file = "pywin32-228-cp27-cp27m-win32.whl", hash = "sha256:37dc9935f6a383cc744315ae0c2882ba1768d9b06700a70f35dc1ce73cd4ba9c"},
{file = "pywin32-228-cp27-cp27m-win_amd64.whl", hash = "sha256:11cb6610efc2f078c9e6d8f5d0f957620c333f4b23466931a247fb945ed35e89"},
{file = "pywin32-228-cp35-cp35m-win32.whl", hash = "sha256:1f45db18af5d36195447b2cffacd182fe2d296849ba0aecdab24d3852fbf3f80"},
{file = "pywin32-228-cp35-cp35m-win_amd64.whl", hash = "sha256:6e38c44097a834a4707c1b63efa9c2435f5a42afabff634a17f563bc478dfcc8"},
{file = "pywin32-228-cp36-cp36m-win32.whl", hash = "sha256:ec16d44b49b5f34e99eb97cf270806fdc560dff6f84d281eb2fcb89a014a56a9"},
{file = "pywin32-228-cp36-cp36m-win_amd64.whl", hash = "sha256:a60d795c6590a5b6baeacd16c583d91cce8038f959bd80c53bd9a68f40130f2d"},
{file = "pywin32-228-cp37-cp37m-win32.whl", hash = "sha256:af40887b6fc200eafe4d7742c48417529a8702dcc1a60bf89eee152d1d11209f"},
{file = "pywin32-228-cp37-cp37m-win_amd64.whl", hash = "sha256:00eaf43dbd05ba6a9b0080c77e161e0b7a601f9a3f660727a952e40140537de7"},
{file = "pywin32-228-cp38-cp38-win32.whl", hash = "sha256:fa6ba028909cfc64ce9e24bcf22f588b14871980d9787f1e2002c99af8f1850c"},
{file = "pywin32-228-cp38-cp38-win_amd64.whl", hash = "sha256:9b3466083f8271e1a5eb0329f4e0d61925d46b40b195a33413e0905dccb285e8"},
{file = "pywin32-228-cp39-cp39-win32.whl", hash = "sha256:ed74b72d8059a6606f64842e7917aeee99159ebd6b8d6261c518d002837be298"},
{file = "pywin32-228-cp39-cp39-win_amd64.whl", hash = "sha256:8319bafdcd90b7202c50d6014efdfe4fde9311b3ff15fd6f893a45c0868de203"},
]
pyyaml = [
{file = "PyYAML-5.3.1-cp27-cp27m-win32.whl", hash = "sha256:74809a57b329d6cc0fdccee6318f44b9b8649961fa73144a98735b0aaf029f1f"},
{file = "PyYAML-5.3.1-cp27-cp27m-win_amd64.whl", hash = "sha256:240097ff019d7c70a4922b6869d8a86407758333f02203e0fc6ff79c5dcede76"},
@ -845,6 +1212,15 @@ regex = [
{file = "regex-2020.7.14-cp38-cp38-win_amd64.whl", hash = "sha256:7a2dd66d2d4df34fa82c9dc85657c5e019b87932019947faece7983f2089a840"},
{file = "regex-2020.7.14.tar.gz", hash = "sha256:3a3af27a8d23143c49a3420efe5b3f8cf1a48c6fc8bc6856b03f638abc1833bb"},
]
requests = [
{file = "requests-2.24.0-py2.py3-none-any.whl", hash = "sha256:fe75cc94a9443b9246fc7049224f75604b113c36acb93f87b80ed42c44cbb898"},
{file = "requests-2.24.0.tar.gz", hash = "sha256:b3559a131db72c33ee969480840fff4bb6dd111de7dd27c8ee1f820f4f00231b"},
]
requests-oauthlib = [
{file = "requests-oauthlib-1.3.0.tar.gz", hash = "sha256:b4261601a71fd721a8bd6d7aa1cc1d6a8a93b4a9f5e96626f8e4d91e8beeaa6a"},
{file = "requests_oauthlib-1.3.0-py2.py3-none-any.whl", hash = "sha256:7f71572defaecd16372f9006f33c2ec8c077c3cfa6f5911a9a90202beb513f3d"},
{file = "requests_oauthlib-1.3.0-py3.7.egg", hash = "sha256:fa6c47b933f01060936d87ae9327fead68768b69c6c9ea2109c48be30f2d4dbc"},
]
retry = [
{file = "retry-0.9.2-py2.py3-none-any.whl", hash = "sha256:ccddf89761fa2c726ab29391837d4327f819ea14d244c232a1d24c67a2f98606"},
{file = "retry-0.9.2.tar.gz", hash = "sha256:f8bfa8b99b69c4506d6f5bd3b0aabf77f98cdb17f3c9fc3f5ca820033336fba4"},
@ -863,6 +1239,9 @@ spur = [
spurplus = [
{file = "spurplus-2.3.3.tar.gz", hash = "sha256:71c734a0827a68235d5b610c3570c3abc0c6be56708a340ba7bebc0a6eb77e92"},
]
stringcase = [
{file = "stringcase-1.2.0.tar.gz", hash = "sha256:48a06980661908efe8d9d34eab2b6c13aefa2163b3ced26972902e3bdfd87008"},
]
temppathlib = [
{file = "temppathlib-1.0.3.tar.gz", hash = "sha256:58eaea9190639591f5005289e128b3b822eb5a3341d538ffdb7e67a73526421a"},
]
@ -898,10 +1277,19 @@ typed-ast = [
{file = "typed_ast-1.4.1.tar.gz", hash = "sha256:8c8aaad94455178e3187ab22c8b01a3837f8ee50e09cf31f1ba129eb293ec30b"},
]
typing-extensions = [
{file = "typing_extensions-3.7.4.2-py2-none-any.whl", hash = "sha256:f8d2bd89d25bc39dabe7d23df520442fa1d8969b82544370e03d88b5a591c392"},
{file = "typing_extensions-3.7.4.2-py3-none-any.whl", hash = "sha256:6e95524d8a547a91e08f404ae485bbb71962de46967e1b71a0cb89af24e761c5"},
{file = "typing_extensions-3.7.4.2.tar.gz", hash = "sha256:79ee589a3caca649a9bfd2a8de4709837400dfa00b6cc81962a1e6a1815969ae"},
{file = "typing_extensions-3.7.4.3-py2-none-any.whl", hash = "sha256:dafc7639cde7f1b6e1acc0f457842a83e722ccca8eef5270af2d74792619a89f"},
{file = "typing_extensions-3.7.4.3-py3-none-any.whl", hash = "sha256:7cb407020f00f7bfc3cb3e7881628838e69d8f3fcab2f64742a5e76b2f841918"},
{file = "typing_extensions-3.7.4.3.tar.gz", hash = "sha256:99d4073b617d30288f569d3f13d2bd7548c3a7e4c8de87db09a9d29bb3a4a60c"},
]
typing-inspect = [
{file = "typing_inspect-0.6.0-py2-none-any.whl", hash = "sha256:de08f50a22955ddec353876df7b2545994d6df08a2f45d54ac8c05e530372ca0"},
{file = "typing_inspect-0.6.0-py3-none-any.whl", hash = "sha256:3b98390df4d999a28cf5b35d8b333425af5da2ece8a4ea9e98f71e7591347b4f"},
{file = "typing_inspect-0.6.0.tar.gz", hash = "sha256:8f1b1dd25908dbfd81d3bebc218011531e7ab614ba6e5bf7826d887c834afab7"},
]
ujson = [
{file = "ujson-1.35.tar.gz", hash = "sha256:f66073e5506e91d204ab0c614a148d5aa938bdbf104751be66f8ad7a222f5f86"},
]
urllib3 = [
{file = "urllib3-1.25.10-py2.py3-none-any.whl", hash = "sha256:e7983572181f5e1522d9c98453462384ee92a0be7fac5f1413a1e35c56cc0461"},
{file = "urllib3-1.25.10.tar.gz", hash = "sha256:91056c15fa70756691db97756772bb1eb9678fa585d9184f24534b100dc60f4a"},
]


@ -12,15 +12,19 @@ pyyaml = "^5.3.1"
retry = "^0.9.2"
paramiko = "^2.7.1"
spurplus = "^2.3.3"
cerberus = "^1.3.2"
dataclasses-json = "^0.5.2"
# portalocker for Windows compatibility; it pulls in pywin32 there
portalocker = "^1.7.1"
azure-identity = {version = "^1.4.0", allow-prereleases = true}
azure-mgmt-resource = {version = "^15.0.0-beta.1", allow-prereleases = true}
[tool.poetry.dev-dependencies]
black = "^19.10b0"
flake8 = "^3.8.3"
flake8-black = "^0.2.1"
flake8-bugbear = "^20.1.4"
flake8-isort = "^3.0.1"
isort = "^4.3.21"
flake8-isort = "^4.0.0"
isort = "^5.4.2"
mypy = "^0.782"
pyls-black = "^0.4.6"
pyls-isort = "^0.1.1"
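The `poetry.lock` changes above are generated output; after editing this dependency table, the lock file is refreshed with Poetry rather than edited by hand:

```bash
poetry lock     # re-resolve dependencies and rewrite poetry.lock
poetry install  # sync the environment with the updated lock file
```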