This commit is contained in:
Lili Deng 2023-09-01 10:13:31 +08:00 committed by LiliDeng
Parent ff31aac2b0
Commit 51d8d4e4f9
21 changed files with 1279 additions and 2 deletions

Просмотреть файл

@ -30,6 +30,19 @@ try:
except ModuleNotFoundError as e:
print(f"azure package is not installed. [{e}]")
# Baremetal modules
try:
import lisa.sut_orchestrator.baremetal.build # noqa: F401
import lisa.sut_orchestrator.baremetal.cluster.cluster # noqa: F401
import lisa.sut_orchestrator.baremetal.cluster.idrac # noqa: F401
import lisa.sut_orchestrator.baremetal.cluster.rackmanager # noqa: F401
import lisa.sut_orchestrator.baremetal.ip_getter # noqa: F401
import lisa.sut_orchestrator.baremetal.platform_ # noqa: F401
import lisa.sut_orchestrator.baremetal.readychecker # noqa: F401
import lisa.sut_orchestrator.baremetal.source # noqa: F401
except ModuleNotFoundError as e:
print(f"baremetal package is not installed. [{e}]")
# Aws modules
try:
import lisa.sut_orchestrator.aws.platform_ # noqa: F401

Просмотреть файл

@ -126,6 +126,7 @@ class OperatingSystem:
__debian_issue_pattern = re.compile(r"^([^ ]+) ?.*$")
__release_pattern = re.compile(r"^DISTRIB_ID='?([^ \n']+).*$", re.M)
__suse_release_pattern = re.compile(r"^(SUSE).*$", re.M)
__bmc_release_pattern = re.compile(r".*(wcscli).*$", re.M)
__posix_factory: Optional[Factory[Any]] = None
@ -208,6 +209,9 @@ class OperatingSystem:
@classmethod
def _get_detect_string(cls, node: Any) -> Iterable[str]:
typed_node: Node = node
cmd_result = typed_node.execute(cmd="wcscli", no_error_log=True)
yield get_matched_str(cmd_result.stdout, cls.__bmc_release_pattern)
cmd_result = typed_node.execute(cmd="lsb_release -d", no_error_log=True)
yield get_matched_str(cmd_result.stdout, cls.__lsb_release_pattern)
@ -663,6 +667,12 @@ class BSD(Posix):
...
class BMC(Posix):
    """Posix flavor for a BMC (baseboard management controller) node,
    detected when the node answers the `wcscli` CLI probe."""

    @classmethod
    def name_pattern(cls) -> Pattern[str]:
        # Must exactly match the "wcscli" detect string yielded by
        # OperatingSystem._get_detect_string.
        return re.compile("^wcscli$")
class MacOS(Posix):
@classmethod
def name_pattern(cls) -> Pattern[str]:

Просмотреть файл

@ -1311,7 +1311,7 @@ class Platform(TypedSchema, ExtendableSchemaMixin):
add_secret(self.admin_username, PATTERN_HEADTAIL)
add_secret(self.admin_password)
if self.type != constants.PLATFORM_READY:
if self.type not in [constants.PLATFORM_READY, constants.PLATFORM_BAREMETAL]:
if not self.admin_password and not self.admin_private_key_file:
raise LisaException(
"one of admin_password and admin_private_key_file must be set"

Просмотреть файл

@ -1,9 +1,10 @@
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
from lisa.util.constants import PLATFORM_READY
from lisa.util.constants import PLATFORM_BAREMETAL, PLATFORM_READY
AZURE = "azure"
QEMU = "qemu"
CLOUD_HYPERVISOR = "cloud-hypervisor"
AWS = "aws"
READY = PLATFORM_READY
BAREMETAL = PLATFORM_BAREMETAL

Просмотреть файл

Просмотреть файл

@ -0,0 +1,101 @@
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
import os
import re
from pathlib import Path
from typing import Dict, List, Type
from smb.SMBConnection import SMBConnection # type: ignore
from lisa import schema
from lisa.util import ContextMixin, InitializableMixin, subclasses
from lisa.util.logger import get_logger
from .schema import BuildSchema, FileSchema, SMBBuildSchema
class Build(subclasses.BaseClassWithRunbookMixin, ContextMixin, InitializableMixin):
    """Base class for build handlers that copy build artifacts to a target.

    Concrete subclasses (e.g. SMBBuild) are selected by runbook `type`.
    """

    def __init__(self, runbook: BuildSchema) -> None:
        super().__init__(runbook=runbook)
        # NOTE(review): logger category "cluster" looks copy-pasted from the
        # cluster module — confirm whether it should read "build".
        self._log = get_logger("cluster", self.__class__.__name__)

    @classmethod
    def type_schema(cls) -> Type[schema.TypedSchema]:
        return BuildSchema

    def copy(self, sources_path: List[Path], files_map: List[FileSchema]) -> None:
        # Copy files described by files_map from sources_path to the target.
        raise NotImplementedError()
class SMBBuild(Build):
    """Build handler that uploads matched artifact files to an SMB share."""

    def __init__(self, runbook: SMBBuildSchema) -> None:
        super().__init__(runbook)
        self.smb_runbook: SMBBuildSchema = self.runbook

    @classmethod
    def type_name(cls) -> str:
        return "smb"

    @classmethod
    def type_schema(cls) -> Type[schema.TypedSchema]:
        return SMBBuildSchema

    def copy(self, sources_path: List[Path], files_map: List[FileSchema]) -> None:
        """Upload every file matched by files_map to the configured share.

        For each matched file: if the map has a destination that is an
        existing directory on the share, store the file inside it under its
        own name; if the destination is a file path, store under that path;
        with no destination, store at the share root under the file's name.
        """
        username = self.smb_runbook.username
        password = self.smb_runbook.password
        server_name = self.smb_runbook.server_name
        share_name = self.smb_runbook.share
        # NOTE(review): relies on pysmb's SMBConnection supporting the
        # context-manager protocol — confirm the pinned pysmb version does.
        with SMBConnection(
            username=username,
            password=password,
            my_name="",
            remote_name=server_name,
        ) as conn:
            conn.connect(server_name)
            for file, file_map in self._find_matched_files(
                sources_path, files_map
            ).items():
                with open(file, "rb") as f:
                    if file_map.destination:
                        attrs = conn.getAttributes(
                            share_name, file_map.destination, timeout=30
                        )
                        if attrs.isDirectory:
                            # Destination is a directory: keep the source
                            # file's own name. NOTE(review): rsplit("\\")
                            # assumes Windows-style local paths — verify on
                            # POSIX runners.
                            file_name = (
                                file_map.destination + "\\" + file.rsplit("\\")[-1]
                            )
                        else:
                            file_name = file_map.destination
                        conn.storeFile(share_name, file_name, f)
                    else:
                        file_name = file.rsplit("\\")[-1]
                        conn.storeFile(
                            share_name,
                            file_name,
                            f,
                        )
                self._log.debug(f"copy file {file} to {share_name}\\{file_name}")

    def _find_matched_files(
        self, sources_path: List[Path], files_map: List[FileSchema]
    ) -> Dict[str, FileSchema]:
        """Map each local artifact file to the FileSchema whose `source`
        pattern (anchored at the source path) matches it, case-insensitively."""
        all_files = []
        match_files: Dict[str, FileSchema] = {}
        for source_path in sources_path:
            for root, _, files in os.walk(source_path):
                for file in files:
                    all_files.append(os.path.join(root, file))
            for file_map in files_map:
                # Escape backslashes so the path survives re.compile.
                file_path = rf"{source_path}\{file_map.source}".replace("\\", "\\\\")
                pattern = re.compile(
                    file_path,
                    re.I | re.M,
                )
                for file in all_files:
                    if pattern.match(file):
                        match_files[file] = file_map
        return match_files

Просмотреть файл

Просмотреть файл

@ -0,0 +1,37 @@
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
from typing import Any, Type
from lisa import features, schema
from lisa.environment import Environment
from lisa.util import InitializableMixin, subclasses
from lisa.util.logger import get_logger
from ..schema import ClusterSchema
class Cluster(subclasses.BaseClassWithRunbookMixin, InitializableMixin):
    """Base class for baremetal cluster controllers.

    Subclasses (e.g. idrac, rackmanager) implement hardware-specific
    deployment and provide the power/console feature classes.
    """

    def __init__(
        self,
        runbook: ClusterSchema,
    ) -> None:
        super().__init__(runbook=runbook)
        self.cluster_runbook: ClusterSchema = self.runbook
        self._log = get_logger("cluster", self.__class__.__name__)

    @classmethod
    def type_schema(cls) -> Type[schema.TypedSchema]:
        return ClusterSchema

    def deploy(self, environment: Environment) -> Any:
        # Provision/boot the cluster machines for the given environment.
        raise NotImplementedError()

    def has_serial_console(self) -> bool:
        raise NotImplementedError()

    def get_serial_console(self) -> Type[features.SerialConsole]:
        # Returns the feature class (not an instance) for serial console.
        raise NotImplementedError()

    def get_start_stop(self) -> Type[features.StartStop]:
        # Returns the feature class (not an instance) for power control.
        raise NotImplementedError()

Просмотреть файл

@ -0,0 +1,186 @@
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
import time
import xml.etree.ElementTree as ETree
from typing import Any, Type
import redfish # type: ignore
from assertpy import assert_that
from lisa import features, schema
from lisa.environment import Environment
from lisa.util import LisaException
from lisa.util.logger import get_logger
from lisa.util.perf_timer import create_timer
from ..platform_ import BareMetalPlatform
from ..schema import ClusterSchema, IdracSchema
from .cluster import Cluster
class IdracStartStop(features.StartStop):
    """StartStop feature that drives node power state through iDRAC.

    Each operation opens a Redfish session on the platform's shared Idrac
    cluster object, issues a reset, and closes the session again.
    """

    def _login(self) -> None:
        # The platform owns the single shared Idrac cluster instance.
        platform: BareMetalPlatform = self._platform  # type: ignore
        self.cluster: Idrac = platform.cluster  # type: ignore
        self.cluster.login()

    def _logout(self) -> None:
        platform: BareMetalPlatform = self._platform  # type: ignore
        self.cluster = platform.cluster  # type: ignore
        self.cluster.logout()

    def _stop(
        self, wait: bool = True, state: features.StopState = features.StopState.Shutdown
    ) -> None:
        """Gracefully shut the system down; hibernate is unsupported."""
        if state == features.StopState.Hibernate:
            raise NotImplementedError(
                "baremetal orchestrator does not support hibernate stop"
            )
        self._login()
        # Fix: always log out — the previous flow returned early on the
        # "already off" path (and propagated exceptions) while the Redfish
        # session was still open, leaking the session.
        try:
            if self.cluster.get_power_state() == "Off":
                self._log.debug("System is already off.")
                return
            self.cluster.reset("GracefulShutdown")
        finally:
            self._logout()

    def _start(self, wait: bool = True) -> None:
        """Power the system on if it is not already running."""
        self._login()
        try:
            if self.cluster.get_power_state() == "On":
                self._log.debug("System is already powered on.")
                return
            self.cluster.reset("On")
        finally:
            self._logout()

    def _restart(self, wait: bool = True) -> None:
        """Force an immediate restart."""
        self._login()
        try:
            self.cluster.reset("ForceRestart")
        finally:
            self._logout()
class Idrac(Cluster):
    """Cluster controller for Dell iDRAC, driven via the Redfish REST API.

    Deployment boots the single configured client once from an HTTP-hosted
    ISO using virtual media and a one-time boot-order override.
    """

    def __init__(self, runbook: ClusterSchema) -> None:
        super().__init__(runbook)
        self.idrac_runbook: IdracSchema = self.runbook
        self._log = get_logger("idrac", self.__class__.__name__)

        assert_that(len(self.idrac_runbook.client)).described_as(
            "only one client is supported for idrac, don't specify more than one client"
        ).is_equal_to(1)

        self.client = self.idrac_runbook.client[0]

    @classmethod
    def type_name(cls) -> str:
        return "idrac"

    @classmethod
    def type_schema(cls) -> Type[schema.TypedSchema]:
        return IdracSchema

    def get_start_stop(self) -> Type[features.StartStop]:
        return IdracStartStop

    def deploy(self, environment: Environment) -> Any:
        """Boot the node from the configured ISO: eject stale media, arrange
        a one-time virtual-CD boot, shut down if running, attach the ISO and
        power on."""
        self.login()
        self._eject_virtual_media()
        self._change_boot_order_once("VCD-DVD")
        assert self.client.iso_http_url, "iso_http_url is required for idrac client"
        if self.get_power_state() == "Off":
            self._log.debug("System is already off.")
        else:
            self.reset("GracefulShutdown")
        self._insert_virtual_media(self.client.iso_http_url)
        self.reset("On")
        self.logout()

    def reset(self, operation: str) -> None:
        """Issue a ComputerSystem.Reset (e.g. "On", "GracefulShutdown",
        "ForceRestart") and wait for the task to finish."""
        body = {"ResetType": operation}
        response = self.redfish_instance.post(
            "/redfish/v1/Systems/System.Embedded.1/Actions/ComputerSystem.Reset",
            body=body,
        )
        self._wait_for_completion(response)
        self._log.debug(f"{operation} initiated successfully.")

    def get_power_state(self) -> str:
        """Return the current Redfish PowerState ("On"/"Off"/...)."""
        response = self.redfish_instance.get(
            "/redfish/v1/Systems/System.Embedded.1/",
        )
        return str(response.dict["PowerState"])

    def login(self) -> None:
        """Open a Redfish session using the runbook credentials."""
        self.redfish_instance = redfish.redfish_client(
            base_url="https://" + self.idrac_runbook.address,
            username=self.idrac_runbook.username,
            password=self.idrac_runbook.password,
        )
        self.redfish_instance.login(auth="session")
        self._log.debug(f"Login to {self.redfish_instance.get_base_url()} successful.")

    def logout(self) -> None:
        self._log.debug("Logging out...")
        self.redfish_instance.logout()

    def _wait_for_completion(self, response: Any, timeout: int = 600) -> None:
        """Poll an asynchronous Redfish task until it finishes or *timeout*
        seconds elapse, then fail on a non-success status."""
        if response.is_processing:
            task = response.monitor(self.redfish_instance)
            timer = create_timer()
            while task.is_processing and timer.elapsed(False) < timeout:
                retry_time = task.retry_after
                # Honor the service's Retry-After hint; default to 5s.
                time.sleep(retry_time if retry_time else 5)
                task = response.monitor(self.redfish_instance)

        if response.status not in [200, 202, 204]:
            # Fix: format the status into the message instead of passing it
            # as a second exception argument (which rendered as a tuple).
            raise LisaException(
                f"Failed to complete task! - status: {response.status}"
            )

    def _insert_virtual_media(self, iso_http_url: str) -> None:
        """Attach the ISO at *iso_http_url* as the virtual CD."""
        self._log.debug("Inserting virtual media...")
        body = {"Image": iso_http_url}
        response = self.redfish_instance.post(
            "/redfish/v1/Managers/iDRAC.Embedded.1/VirtualMedia/CD/Actions/"
            "VirtualMedia.InsertMedia",
            body=body,
        )
        self._wait_for_completion(response)
        self._log.debug("Inserting virtual media completed...")

    def _eject_virtual_media(self) -> None:
        """Detach any currently attached virtual CD (best effort)."""
        self._log.debug("Ejecting virtual media...")
        response = self.redfish_instance.post(
            "/redfish/v1/Managers/iDRAC.Embedded.1/VirtualMedia/CD/Actions/"
            "VirtualMedia.EjectMedia",
            body={},
        )
        # Ignore return on failure as it is ok if no media was attached
        if response.status in [200, 202, 204]:
            self._wait_for_completion(response)

    def _change_boot_order_once(self, boot_from: str) -> None:
        """Set a one-time boot override (e.g. "VCD-DVD") via iDRAC's
        ImportSystemConfiguration with an SCP XML payload."""
        self._log.debug(f"Updating boot source to {boot_from}")
        sys_config = ETree.Element("SystemConfiguration")
        component = ETree.SubElement(
            sys_config, "Component", {"FQDD": "iDRAC.Embedded.1"}
        )
        boot_once_attribute = ETree.SubElement(
            component, "Attribute", {"Name": "VirtualMedia.1#BootOnce"}
        )
        boot_once_attribute.text = "Enabled"
        first_boot_attribute = ETree.SubElement(
            component, "Attribute", {"Name": "ServerBoot.1#FirstBootDevice"}
        )
        first_boot_attribute.text = boot_from
        # NOTE(review): method="html" serializes XML without the declaration;
        # confirm iDRAC accepts it (method="xml" would be the usual choice).
        import_buffer = ETree.tostring(
            sys_config, encoding="utf8", method="html"
        ).decode()
        body = {"ShareParameters": {"Target": "ALL"}, "ImportBuffer": import_buffer}
        response = self.redfish_instance.post(
            "/redfish/v1/Managers/iDRAC.Embedded.1/Actions/Oem/"
            "EID_674_Manager.ImportSystemConfiguration",
            body=body,
        )
        self._log.debug("Waiting for boot order override task to complete...")
        self._wait_for_completion(response)
        self._log.debug(f"Updating boot source to {boot_from} completed")

Просмотреть файл

@ -0,0 +1,72 @@
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
from typing import Any, Type
from lisa import features, schema
from lisa.environment import Environment
from lisa.node import quick_connect
from lisa.util.logger import get_logger
from ..platform_ import BareMetalPlatform
from ..schema import RackManagerSchema
from .cluster import Cluster
class RackManagerStartStop(features.StartStop):
    """StartStop feature that forwards power operations to the RackManager
    cluster owned by the platform."""

    def init_rack_manager(self) -> None:
        # Resolve the shared RackManager instance from the platform.
        platform: BareMetalPlatform = self._platform  # type: ignore
        self.cluster: RackManager = platform.cluster  # type: ignore

    def _stop(
        self, wait: bool = True, state: features.StopState = features.StopState.Shutdown
    ) -> None:
        # Only a hard power-off is available; hibernate has no equivalent.
        if state == features.StopState.Hibernate:
            raise NotImplementedError(
                "baremetal orchestrator does not support hibernate stop"
            )
        self.init_rack_manager()
        self.cluster.reset("off")

    def _start(self, wait: bool = True) -> None:
        self.init_rack_manager()
        self.cluster.reset("on")

    def _restart(self, wait: bool = True) -> None:
        self.init_rack_manager()
        self.cluster.reset("reset")
class RackManager(Cluster):
    """Cluster controller that powers nodes via a rack manager's CLI over SSH."""

    def __init__(self, runbook: RackManagerSchema) -> None:
        super().__init__(runbook)
        self.rm_runbook: RackManagerSchema = self.runbook
        self._log = get_logger("rackmanager", self.__class__.__name__)

    @classmethod
    def type_name(cls) -> str:
        return "rackmanager"

    @classmethod
    def type_schema(cls) -> Type[schema.TypedSchema]:
        return RackManagerSchema

    def get_start_stop(self) -> Type[features.StartStop]:
        return RackManagerStartStop

    def deploy(self, environment: Environment) -> Any:
        # Power-cycle all clients: everything off, then everything on.
        self.reset("off")
        self.reset("on")

    def reset(self, operation: str) -> None:
        """Run `set system <operation>` ("on"/"off"/"reset") against every
        configured client's management port on the rack manager."""
        assert self.rm_runbook.connection, "connection is required for rackmanager"
        self.rm_runbook.connection.name = "rackmanager"
        rm_node = quick_connect(self.rm_runbook.connection, logger_name="rackmanager")
        assert self.rm_runbook.client, "client is required for rackmanager"
        for client in self.rm_runbook.client:
            assert (
                client.management_port
            ), "management_port is required for rackmanager client"
            rm_node.execute(f"set system {operation} -i {client.management_port}")
            self._log.debug(f"client has been {operation} successfully")

Просмотреть файл

@ -0,0 +1,34 @@
from dataclasses import dataclass, field

from lisa import schema
from lisa.environment import Environment
from lisa.node import Node

from .build import Build
@dataclass
class EnvironmentContext:
    """Per-environment state for the baremetal orchestrator."""

    # SSH public key associated with the environment.
    # NOTE(review): not referenced in the visible code — confirm usage.
    ssh_public_key: str = ""
@dataclass
class NodeContext:
    """Per-node state: how to connect to the deployed baremetal node."""

    # Fix: use default_factory so every NodeContext owns a fresh
    # ConnectionInfo. A plain class-level default instance is shared by all
    # contexts, so an in-place update (e.g. an ip_getter overwriting
    # `connection.address`) would silently leak across nodes.
    connection: schema.ConnectionInfo = field(
        default_factory=lambda: schema.ConnectionInfo(password="mock")
    )
@dataclass
class BuildContext:
    """Per-build state; prevents copying the same build more than once."""

    # Set to True after the build's files have been copied to the cluster.
    is_copied: bool = False
def get_environment_context(environment: Environment) -> EnvironmentContext:
    """Return the EnvironmentContext attached to *environment*."""
    return environment.get_context(EnvironmentContext)
def get_node_context(node: Node) -> NodeContext:
    """Return the NodeContext attached to *node*."""
    return node.get_context(NodeContext)
def get_build_context(build: Build) -> BuildContext:
    """Return the BuildContext attached to *build*."""
    return build.get_context(BuildContext)

Просмотреть файл

@ -0,0 +1,39 @@
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
from typing import TYPE_CHECKING, Any
from lisa import schema
from lisa.feature import Feature
if TYPE_CHECKING:
from .platform_ import BareMetalPlatform
class ClusterFeature(Feature):
    """Proxy feature that delegates every attribute access to a concrete,
    cluster-provided inner feature instantiated in `_initialize`."""

    def __getattr__(self, key: str) -> Any:
        # Fix: if "_inner" itself is not yet set (e.g. before _initialize
        # runs), reading `self._inner` below would re-enter __getattr__ and
        # recurse forever; raise AttributeError for it explicitly.
        if key == "_inner":
            raise AttributeError(key)
        assert self._inner, "inner is not set"
        return getattr(self._inner, key)

    def _initialize(self, *args: Any, **kwargs: Any) -> None:
        # Resolve the concrete feature class from the platform's cluster and
        # instantiate it; all further attribute access is delegated to it.
        _feature_type = self._get_inner_type()
        self._inner = _feature_type(
            schema.FeatureSettings.create(_feature_type.name()),
            self._node,
            self._platform,
            *args,
            **kwargs,
        )
class StartStop(ClusterFeature):
    """StartStop proxy; the concrete implementation comes from the cluster."""

    def _get_inner_type(self) -> Feature:
        # Returns the feature *class* (e.g. IdracStartStop) supplied by the
        # concrete cluster; it is instantiated in ClusterFeature._initialize.
        platform: BareMetalPlatform = self._platform  # type: ignore
        return platform.cluster.get_start_stop()  # type: ignore
class SerialConsole(ClusterFeature):
    """SerialConsole proxy; the concrete implementation comes from the cluster."""

    def _get_inner_type(self) -> Feature:
        # Returns the feature *class* supplied by the concrete cluster.
        platform: BareMetalPlatform = self._platform  # type: ignore
        return platform.cluster.get_serial_console()  # type: ignore

Просмотреть файл

@ -0,0 +1,68 @@
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
import re
from dataclasses import dataclass
from typing import Type
from dataclasses_json import dataclass_json
from lisa import schema
from lisa.util import InitializableMixin, get_matched_str, subclasses
from lisa.util.logger import get_logger
from .schema import IpGetterSchema
class IpGetterChecker(subclasses.BaseClassWithRunbookMixin, InitializableMixin):
    """Base class for strategies that discover a deployed node's IP address."""

    def __init__(
        self,
        runbook: IpGetterSchema,
    ) -> None:
        super().__init__(runbook=runbook)
        self.ip_getter_runbook: IpGetterSchema = self.runbook
        self._log = get_logger("ip_getter", self.__class__.__name__)

    @classmethod
    def type_schema(cls) -> Type[schema.TypedSchema]:
        return IpGetterSchema

    def get_ip(self) -> str:
        # Return the node's IP address as a string.
        raise NotImplementedError()
@dataclass_json()
@dataclass
class FileSingleSchema(IpGetterSchema):
    """Runbook options for the file-based IP getter."""

    # Path of the file whose content carries the node's IP address.
    file: str = ""
class FileSingleChecker(IpGetterChecker):
    """IP getter that reads the node's IP address out of a text file."""

    # ipaddr=X.XX.XXX.X
    # NOTE(review): this pattern matches the first run of digits/dots
    # anywhere in the content, not specifically the value after "ipaddr=" —
    # confirm the file never contains other numeric tokens first.
    __ip_addr_regex = re.compile(r"(?P<ip_addr>[\d.]+)", re.M)

    def __init__(
        self,
        runbook: FileSingleSchema,
    ) -> None:
        super().__init__(runbook=runbook)
        self.file_single_runbook: FileSingleSchema = self.runbook
        self._log = get_logger("file_single", self.__class__.__name__)

    @classmethod
    def type_name(cls) -> str:
        return "file_single"

    @classmethod
    def type_schema(cls) -> Type[schema.TypedSchema]:
        return FileSingleSchema

    def get_ip(self) -> str:
        """Return the first IP-like token found in the configured file."""
        with open(self.file_single_runbook.file) as f:
            lines = f.readlines()
        matched = get_matched_str(" ".join(lines), self.__ip_addr_regex, True)
        # NOTE(review): if get_matched_str returns "" (not None) on no match,
        # this assertion never fires — confirm its contract.
        assert matched is not None, (
            f"Could not get ip from content of file {self.file_single_runbook.file}"
            f" {' '.join(lines)}"
        )
        return matched

Просмотреть файл

@ -0,0 +1,69 @@
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
import os
import re
from dataclasses import dataclass
from pathlib import Path
from typing import List, Type
from dataclasses_json import dataclass_json
from lisa import schema
from lisa.util import InitializableMixin, subclasses
from lisa.util.logger import get_logger
from .schema import KeyLoaderSchema
class KeyLoader(subclasses.BaseClassWithRunbookMixin, InitializableMixin):
    """Base class for strategies that locate an SSH private key for nodes."""

    def __init__(
        self,
        runbook: KeyLoaderSchema,
    ) -> None:
        super().__init__(runbook=runbook)
        self.key_loader_runbook: KeyLoaderSchema = self.runbook
        self._log = get_logger("key_loader", self.__class__.__name__)

    @classmethod
    def type_schema(cls) -> Type[schema.TypedSchema]:
        return KeyLoaderSchema

    def load_key(self, sources_path: List[Path]) -> str:
        # Return the path of the key file found under sources_path.
        raise NotImplementedError()
@dataclass_json()
@dataclass
class BuildSchema(KeyLoaderSchema):
    """Runbook options for the build-artifact key loader."""

    # NOTE(review): `file` is not referenced by BuildLoader — confirm usage.
    file: str = ""
    # Filename pattern identifying the private key among the artifacts.
    pattern: str = "id_rsa.*"
class BuildLoader(KeyLoader):
    """Key loader that scans downloaded build artifacts for a private key."""

    def __init__(
        self,
        runbook: BuildSchema,
    ) -> None:
        super().__init__(runbook=runbook)
        self.key_file_runbook: BuildSchema = self.runbook
        self._log = get_logger("build", self.__class__.__name__)

    @classmethod
    def type_name(cls) -> str:
        return "build"

    @classmethod
    def type_schema(cls) -> Type[schema.TypedSchema]:
        return BuildSchema

    def load_key(self, sources_path: List[Path]) -> str:
        """Return the first artifact whose filename matches the configured
        pattern (case-insensitive); empty string when nothing matches."""
        matcher = re.compile(
            self.key_file_runbook.pattern,
            re.I | re.M,
        )
        for folder in sources_path:
            candidates = (name for name in os.listdir(folder) if matcher.match(name))
            for name in candidates:
                return os.path.join(folder, name)
        return ""

Просмотреть файл

@ -0,0 +1,233 @@
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
from pathlib import Path
from typing import Any, List, Optional, Type
from lisa import RemoteNode, feature, schema, search_space
from lisa.environment import Environment
from lisa.platform_ import Platform
from lisa.util import fields_to_dict
from lisa.util.logger import Logger
from lisa.util.shell import try_connect
from lisa.util.subclasses import Factory
from .. import BAREMETAL
from .build import Build
from .cluster.cluster import Cluster
from .context import get_build_context, get_node_context
from .features import SerialConsole, StartStop
from .ip_getter import IpGetterChecker
from .key_loader import KeyLoader
from .readychecker import ReadyChecker
from .schema import BareMetalPlatformSchema, BuildSchema
from .source import Source
class BareMetalPlatform(Platform):
    """LISA platform implementation for baremetal machines.

    Deployment is composed from pluggable pieces created via subclass
    factories: a Source (downloads build artifacts), a Build (copies them
    to the cluster), a Cluster (powers/boots the machines), a ReadyChecker
    (waits for boot), and an IpGetter/KeyLoader (connection details).
    """

    def __init__(
        self,
        runbook: schema.Platform,
    ) -> None:
        super().__init__(runbook=runbook)

    @classmethod
    def type_name(cls) -> str:
        return BAREMETAL

    @classmethod
    def supported_features(cls) -> List[Type[feature.Feature]]:
        return [StartStop, SerialConsole]

    def _initialize(self, *args: Any, **kwargs: Any) -> None:
        # Validate the extended runbook and prepare one factory per
        # pluggable component type.
        baremetal_runbook: BareMetalPlatformSchema = self.runbook.get_extended_runbook(
            BareMetalPlatformSchema
        )
        assert baremetal_runbook, "platform runbook cannot be empty"
        self._baremetal_runbook = baremetal_runbook
        # Filled by the first deployment that downloads a source; later
        # deployments reuse it instead of downloading again.
        self.local_artifacts_path: Optional[List[Path]] = None
        self.ready_checker_factory = Factory[ReadyChecker](ReadyChecker)
        self.cluster_factory = Factory[Cluster](Cluster)
        self.ip_getter_factory = Factory[IpGetterChecker](IpGetterChecker)
        self.key_loader_factory = Factory[KeyLoader](KeyLoader)
        self.source_factory = Factory[Source](Source)
        self.build_factory = Factory[Build](Build)

    def _prepare_environment(self, environment: Environment, log: Logger) -> bool:
        return self._configure_node_capabilities(environment, log)

    def _deploy_environment(self, environment: Environment, log: Logger) -> None:
        """Download/copy the build, create nodes, power up the cluster, then
        wait for readiness and wire up each node's SSH connection."""
        # currently only support one cluster
        assert self._baremetal_runbook.cluster, "no cluster is specified in the runbook"
        cluster_instance = self._baremetal_runbook.cluster[0]
        self.cluster = self.cluster_factory.create_by_runbook(cluster_instance)
        assert self.cluster.runbook.client, "no client is specified in the runbook"

        # copy build (shared, check if it's copied)
        if self._baremetal_runbook.source:
            if not self.local_artifacts_path:
                source = self.source_factory.create_by_runbook(
                    self._baremetal_runbook.source
                )
                self._log.debug(f"source build '{source.type_name()}'")
                self.local_artifacts_path = source.download()
            else:
                self._log.debug(
                    "build source has been downloaded in "
                    f"'{self.local_artifacts_path}',"
                    " skip download again"
                )
        else:
            self._log.debug("no build source is specified in the runbook")

        ready_checker: Optional[ReadyChecker] = None
        # ready checker cleanup
        if cluster_instance.ready_checker:
            ready_checker = self.ready_checker_factory.create_by_runbook(
                cluster_instance.ready_checker
            )
            ready_checker.clean_up()

        # copy build if source exists
        if self.cluster.runbook.build:
            build = self.build_factory.create_by_runbook(self.cluster.runbook.build)
            build_context = get_build_context(build)
            if build_context.is_copied:
                self._log.debug("build is already copied, skip copy")
            else:
                assert self.local_artifacts_path, "no build source is specified"
                self.copy(
                    self.cluster.runbook.build, sources_path=self.local_artifacts_path
                )
                build_context.is_copied = True

        if self.cluster.runbook.key_loader:
            key_loader = self.key_loader_factory.create_by_runbook(
                self.cluster.runbook.key_loader
            )
            if self.local_artifacts_path:
                key_file = key_loader.load_key(self.local_artifacts_path)

        assert environment.runbook.nodes_requirement, "no node is specified"
        for node_space in environment.runbook.nodes_requirement:
            assert isinstance(
                node_space, schema.NodeSpace
            ), f"actual: {type(node_space)}"
            environment.create_node_from_requirement(node_space)

        for index, node in enumerate(environment.nodes.list()):
            node_context = get_node_context(node)
            # NOTE(review): `key_file` is only bound when a key_loader ran
            # with downloaded artifacts; this branch would raise NameError
            # otherwise — confirm runbook combinations guarantee that.
            if (
                not self.cluster.runbook.client[index].connection.password
                and self.cluster.runbook.client[index].connection.private_key_file == ""
            ):
                self.cluster.runbook.client[
                    index
                ].connection.private_key_file = key_file
            connection_info = schema.ConnectionInfo(
                address=self.cluster.runbook.client[index].connection.address,
                port=self.cluster.runbook.client[index].connection.port,
                username=self.cluster.runbook.client[index].connection.username,
                private_key_file=self.cluster.runbook.client[
                    index
                ].connection.private_key_file,
                password=self.cluster.runbook.client[index].connection.password,
            )
            node_context.connection = connection_info
            # NOTE(review): dead statement — `enumerate` reassigns `index`
            # on every iteration.
            index = index + 1

        # deploy cluster
        self.cluster.deploy(environment)

        if cluster_instance.ready_checker:
            ready_checker = self.ready_checker_factory.create_by_runbook(
                cluster_instance.ready_checker
            )

        for index, node in enumerate(environment.nodes.list()):
            node_context = get_node_context(node)
            # ready checker
            if ready_checker:
                ready_checker.is_ready(node)
            # get ip address
            if cluster_instance.ip_getter:
                ip_getter = self.ip_getter_factory.create_by_runbook(
                    cluster_instance.ip_getter
                )
                node_context.connection.address = ip_getter.get_ip()
            assert isinstance(node, RemoteNode), f"actual: {type(node)}"
            node.name = f"node_{index}"
            node.set_connection_info(
                **fields_to_dict(
                    node_context.connection,
                    ["address", "port", "username", "password", "private_key_file"],
                ),
            )
            # NOTE(review): `connection_info` is left over from the previous
            # loop's last iteration; node_context.connection looks intended —
            # confirm before changing.
            try_connect(connection_info)
        self._log.debug(f"deploy environment {environment.name} successfully")

    def copy(self, build_schema: BuildSchema, sources_path: List[Path]) -> None:
        # Copy downloaded artifacts to the cluster via the configured Build.
        if sources_path:
            build = self.build_factory.create_by_runbook(build_schema)
            build.copy(
                sources_path=sources_path,
                files_map=build_schema.files,
            )
        else:
            self._log.debug("no copied source path specified, skip copy")

    def _configure_node_capabilities(
        self, environment: Environment, log: Logger
    ) -> bool:
        """Check each requested node space against baremetal capabilities and
        shrink the requirements to the minimal matching capability."""
        if not environment.runbook.nodes_requirement:
            return True
        nodes_capabilities = self._create_node_capabilities()
        nodes_requirement = []
        for node_space in environment.runbook.nodes_requirement:
            if not node_space.check(nodes_capabilities):
                return False
            node_requirement = node_space.generate_min_capability(nodes_capabilities)
            nodes_requirement.append(node_requirement)
        environment.runbook.nodes_requirement = nodes_requirement
        return True

    def _create_node_capabilities(self) -> schema.NodeSpace:
        """Build the static capability description for one baremetal node."""
        node_capabilities = schema.NodeSpace()
        node_capabilities.name = "baremetal"
        node_capabilities.node_count = 1
        node_capabilities.core_count = search_space.IntRange(min=1, max=1)
        node_capabilities.disk = schema.DiskOptionSettings(
            data_disk_count=search_space.IntRange(min=0),
            data_disk_size=search_space.IntRange(min=1),
        )
        node_capabilities.network_interface = schema.NetworkInterfaceOptionSettings()
        node_capabilities.network_interface.max_nic_count = 1
        node_capabilities.network_interface.nic_count = 1
        node_capabilities.network_interface.data_path = search_space.SetSpace[
            schema.NetworkDataPath
        ](
            is_allow_set=True,
            items=[schema.NetworkDataPath.Sriov, schema.NetworkDataPath.Synthetic],
        )
        node_capabilities.gpu_count = 0
        node_capabilities.features = search_space.SetSpace[schema.FeatureSettings](
            is_allow_set=True,
            items=[
                schema.FeatureSettings.create(SerialConsole.name()),
                schema.FeatureSettings.create(StartStop.name()),
            ],
        )
        return node_capabilities

Просмотреть файл

@ -0,0 +1,119 @@
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
import os
from dataclasses import dataclass
from typing import Type, cast
from dataclasses_json import dataclass_json
from lisa import schema
from lisa.node import Node, RemoteNode
from lisa.util import InitializableMixin, check_till_timeout, fields_to_dict, subclasses
from lisa.util.logger import get_logger
from lisa.util.shell import try_connect
from .context import get_node_context
from .schema import ReadyCheckerSchema
class ReadyChecker(subclasses.BaseClassWithRunbookMixin, InitializableMixin):
    """Base class for checks that decide when a deployed node is ready."""

    def __init__(
        self,
        runbook: ReadyCheckerSchema,
    ) -> None:
        super().__init__(runbook=runbook)
        self.ready_checker_runbook: ReadyCheckerSchema = self.runbook
        self._log = get_logger("ready_checker", self.__class__.__name__)

    @classmethod
    def type_schema(cls) -> Type[schema.TypedSchema]:
        return ReadyCheckerSchema

    def clean_up(self) -> None:
        # Default: nothing to clean before a deployment.
        pass

    def is_ready(self, node: Node) -> bool:
        # Default: never ready; concrete subclasses override.
        return False
@dataclass_json()
@dataclass
class FileSingleSchema(ReadyCheckerSchema):
    """Runbook options for the file-based ready checker."""

    # Path of the marker file whose existence signals readiness.
    file: str = ""
class FileSingleChecker(ReadyChecker):
    """Readiness check that waits for a marker file to appear on disk."""

    def __init__(
        self,
        runbook: FileSingleSchema,
    ) -> None:
        super().__init__(runbook=runbook)
        self.file_single_runbook: FileSingleSchema = self.runbook
        self._log = get_logger("file_single", self.__class__.__name__)

    @classmethod
    def type_name(cls) -> str:
        return "file_single"

    @classmethod
    def type_schema(cls) -> Type[schema.TypedSchema]:
        return FileSingleSchema

    def clean_up(self) -> None:
        # Remove a stale marker file so the next deployment starts clean.
        if not os.path.exists(self.file_single_runbook.file):
            self._log.debug(
                f"The file {self.file_single_runbook.file} does not exist,"
                " so it doesn't need to be cleaned up."
            )
            return
        os.remove(self.file_single_runbook.file)
        self._log.debug(f"The file {self.file_single_runbook.file} has been removed")

    def is_ready(self, node: Node) -> bool:
        # Block until the marker file shows up (or the timeout elapses),
        # then report its final presence.
        marker = self.file_single_runbook.file
        check_till_timeout(
            lambda: os.path.exists(marker),
            timeout_message="wait for ready check ready",
            timeout=self.file_single_runbook.timeout,
        )
        return os.path.exists(marker)
@dataclass_json()
@dataclass
class SshSchema(ReadyCheckerSchema):
    """Runbook options for the SSH-based ready checker (no extra fields)."""

    ...
class SshChecker(ReadyChecker):
    """Readiness check that succeeds once an SSH connection can be made."""

    def __init__(
        self,
        runbook: SshSchema,
    ) -> None:
        super().__init__(runbook=runbook)
        self.ssh_runbook: SshSchema = self.runbook
        self._log = get_logger("ssh", self.__class__.__name__)

    @classmethod
    def type_name(cls) -> str:
        return "ssh"

    @classmethod
    def type_schema(cls) -> Type[schema.TypedSchema]:
        return SshSchema

    def is_ready(self, node: Node) -> bool:
        # Push the connection info recorded in the node context onto the
        # node, then verify SSH is reachable within the configured timeout.
        context = get_node_context(node)
        remote_node = cast(RemoteNode, node)
        remote_node.set_connection_info(
            **fields_to_dict(
                context.connection,
                ["address", "port", "username", "password", "private_key_file"],
            ),
        )
        self._log.debug("try to connect to the client")
        try_connect(context.connection, ssh_timeout=self.ssh_runbook.timeout)
        self._log.debug("client has been connected successfully")
        return True

Просмотреть файл

@ -0,0 +1,155 @@
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
from dataclasses import dataclass, field
from typing import Any, List, Optional
from dataclasses_json import dataclass_json
from lisa import schema
from lisa.secret import PATTERN_HEADTAIL, add_secret
from lisa.util import field_metadata
@dataclass_json()
@dataclass
class ClientSchema:
    """Base schema for a client (test target) node within a cluster."""

    # Connection details LISA uses to reach the deployed node.
    connection: Optional[schema.RemoteNode] = field(
        default=None, metadata=field_metadata(required=True)
    )
@dataclass_json()
@dataclass
class RackManagerClientSchema(ClientSchema):
    """Client node controlled through a rack manager."""

    # Port index used by the rack manager CLI: `set system <op> -i <port>`.
    management_port: Optional[int] = field(default=-1)
@dataclass_json()
@dataclass
class IdracClientSchema(ClientSchema):
    """Client node controlled through iDRAC."""

    # HTTP URL of the ISO attached as virtual media during deployment.
    iso_http_url: Optional[str] = field(default="")
@dataclass_json()
@dataclass
class ReadyCheckerSchema(schema.TypedSchema, schema.ExtendableSchemaMixin):
    """Base schema selecting and configuring a ReadyChecker subclass."""

    type: str = field(default="file_single", metadata=field_metadata(required=True))
    # Seconds to wait for readiness before giving up.
    timeout: int = 300
@dataclass_json()
@dataclass
class FileSchema:
    """One source-to-destination mapping for a build copy."""

    # Pattern (relative to the downloaded source) selecting files to copy.
    source: str = field(default="")
    # Target path on the share; empty means "share root, original name".
    destination: Optional[str] = field(default="")
@dataclass_json()
@dataclass
class BuildSchema(schema.TypedSchema, schema.ExtendableSchemaMixin):
    """Base schema selecting and configuring a Build subclass."""

    type: str = field(default="smb", metadata=field_metadata(required=True))
    name: str = ""
    # Name of the target share to copy files into.
    share: str = ""
    # File mappings to copy; empty means nothing is copied.
    files: List[FileSchema] = field(default_factory=list)
@dataclass_json()
@dataclass
class IpGetterSchema(schema.TypedSchema, schema.ExtendableSchemaMixin):
    """Base schema selecting and configuring an IpGetterChecker subclass."""

    type: str = field(default="file_single", metadata=field_metadata(required=True))
@dataclass_json()
@dataclass
class KeyLoaderSchema(schema.TypedSchema, schema.ExtendableSchemaMixin):
    """Base schema selecting and configuring a KeyLoader subclass."""

    type: str = field(default="build", metadata=field_metadata(required=True))
@dataclass_json()
@dataclass
class ClusterSchema(schema.TypedSchema, schema.ExtendableSchemaMixin):
    """Base schema for one cluster and its optional pluggable components."""

    type: str = field(default="rackmanager", metadata=field_metadata(required=True))
    # Optional components; each is instantiated via its own factory.
    build: Optional[BuildSchema] = None
    ready_checker: Optional[ReadyCheckerSchema] = None
    ip_getter: Optional[IpGetterSchema] = None
    key_loader: Optional[KeyLoaderSchema] = None
@dataclass_json()
@dataclass
class SourceSchema(schema.TypedSchema, schema.ExtendableSchemaMixin):
    """Base schema selecting and configuring a Source subclass."""

    type: str = field(default="ado", metadata=field_metadata(required=True))
    name: str = ""
@dataclass_json()
@dataclass
class Artifact:
    """One pipeline artifact to download."""

    artifact_name: str = ""
    # Whether to unpack the downloaded artifact archive.
    extract: bool = True
@dataclass_json()
@dataclass
class ADOSourceSchema(SourceSchema):
    """Runbook config for downloading artifacts from Azure DevOps.

    Either ``build_id`` or ``pipeline_name`` must be given to pick the
    run to download from.
    """

    organization_url: str = field(default="", metadata=field_metadata(required=True))
    project: str = field(default="", metadata=field_metadata(required=True))
    build_id: int = 0
    pipeline_name: str = ""
    pat: str = field(default="", metadata=field_metadata(required=True))
    artifacts: List[Artifact] = field(default_factory=list)

    def __post_init__(self, *args: Any, **kwargs: Any) -> None:
        # validate the non-empty fields in declaration order
        for required_name in ("organization_url", "project", "pat", "artifacts"):
            if not getattr(self, required_name):
                raise ValueError(f"{required_name} cannot be empty")
        if not self.build_id and not self.pipeline_name:
            raise ValueError("build_id and pipeline_name are both empty")
        # mask the personal access token in any logged output
        add_secret(self.pat)
@dataclass_json()
@dataclass
class SMBBuildSchema(BuildSchema):
    """Build source fetched from an SMB share."""

    username: str = ""
    password: str = ""
    # NOTE(review): `share` is already declared on BuildSchema — this
    # redeclaration is redundant; confirm before removing.
    share: str = ""
    server_name: str = ""

    def __post_init__(self, *args: Any, **kwargs: Any) -> None:
        # mask the credentials in any logged output
        add_secret(self.username, PATTERN_HEADTAIL)
        add_secret(self.password)
@dataclass_json()
@dataclass
class IdracSchema(ClusterSchema):
    """Cluster config for nodes managed through Dell iDRAC."""

    # address and credentials of the iDRAC endpoint
    address: str = ""
    username: str = ""
    password: str = ""
    # client nodes belonging to this cluster
    client: List[IdracClientSchema] = field(default_factory=list)

    def __post_init__(self, *args: Any, **kwargs: Any) -> None:
        # mask the credentials in any logged output
        add_secret(self.username, PATTERN_HEADTAIL)
        add_secret(self.password)
@dataclass_json()
@dataclass
class RackManagerSchema(ClusterSchema):
    """Cluster config for nodes managed through a rack manager."""

    # remote connection info of the rack manager itself; required
    connection: Optional[schema.RemoteNode] = field(
        default=None, metadata=field_metadata(required=True)
    )
    # client nodes belonging to this cluster
    client: List[RackManagerClientSchema] = field(default_factory=list)
@dataclass_json()
@dataclass
class BareMetalPlatformSchema:
    """Top-level runbook schema of the baremetal platform."""

    # where the deployable artifacts come from
    source: Optional[SourceSchema] = field(default=None)
    # clusters this platform can deploy to
    cluster: List[ClusterSchema] = field(default_factory=list)

Просмотреть файл

@ -0,0 +1,132 @@
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
import os
import re
import zipfile
from pathlib import Path
from typing import List, Type
import requests
from assertpy import assert_that
from azure.devops.connection import Connection # type: ignore
from msrest.authentication import BasicAuthentication
from lisa import schema
from lisa.util import InitializableMixin, constants, get_matched_str, subclasses
from lisa.util.logger import get_logger
from .schema import ADOSourceSchema, SourceSchema
class Source(subclasses.BaseClassWithRunbookMixin, InitializableMixin):
    """Base class of artifact sources for the baremetal platform.

    Concrete subclasses (e.g. ``ADOSource``) implement ``download`` and
    are selected by the runbook ``type`` via the subclass factory mixin.
    """

    def __init__(self, runbook: SourceSchema) -> None:
        super().__init__(runbook=runbook)
        # child logger named after the concrete subclass
        self._log = get_logger("source", self.__class__.__name__)

    @classmethod
    def type_schema(cls) -> Type[schema.TypedSchema]:
        """Runbook schema type used to deserialize this source's config."""
        return SourceSchema

    def download(self, timeout: int = 600) -> List[Path]:
        """Fetch the artifacts and return their local paths.

        Must be implemented by subclasses.
        """
        raise NotImplementedError()
class ADOSource(Source):
    """Source that downloads build artifacts from Azure DevOps (ADO).

    The run to download from is chosen by an explicit ``build_id``, or —
    when ``pipeline_name`` is set — by resolving that pipeline's first
    succeeded and completed run.
    """

    # Pulls the archive format (e.g. "zip") out of the artifact download
    # url, which ends with "...&format=zip".
    __file_format = re.compile(r"format=(?P<format>.*)", re.M)

    def __init__(self, runbook: ADOSourceSchema) -> None:
        super().__init__(runbook)
        self.ado_runbook: ADOSourceSchema = self.runbook
        self._log = get_logger("ado", self.__class__.__name__)

    @classmethod
    def type_name(cls) -> str:
        return "ado"

    @classmethod
    def type_schema(cls) -> Type[schema.TypedSchema]:
        return ADOSourceSchema

    def download(self, timeout: int = 600) -> List[Path]:
        """Download all configured artifacts into the local working path.

        Args:
            timeout: per-request timeout in seconds of the artifact
                download HTTP call.

        Returns:
            One path per configured artifact: the extracted directory when
            ``artifact.extract`` is set, otherwise the downloaded archive.

        Raises:
            requests.HTTPError: when the artifact download request fails.
            ValueError: when a downloaded artifact has an unsupported
                archive format and extraction was requested.
        """
        personal_access_token = self.ado_runbook.pat
        organization_url = self.ado_runbook.organization_url
        project_name = self.ado_runbook.project
        artifacts = self.ado_runbook.artifacts
        build_id = self.ado_runbook.build_id
        pipeline_name = self.ado_runbook.pipeline_name
        working_path = constants.RUN_LOCAL_WORKING_PATH

        credentials = BasicAuthentication("", personal_access_token)
        connection = Connection(base_url=organization_url, creds=credentials)

        # A pipeline name overrides an explicit build id, matching the
        # original precedence.
        if pipeline_name:
            build_id = self._resolve_build_id(
                connection, project_name, pipeline_name
            )

        build_client = connection.clients.get_build_client()
        artifacts_path: List[Path] = []
        # exist_ok makes this safe to run repeatedly
        working_path.mkdir(parents=True, exist_ok=True)
        for artifact in artifacts:
            artifact_name = artifact.artifact_name
            build_artifact = build_client.get_artifact(
                project_name, build_id, artifact_name
            )
            download_url = build_artifact.resource.download_url
            self._log.debug(f"artifact download url: {download_url}")
            file_extension = get_matched_str(download_url, self.__file_format)
            artifact_path = working_path / f"{build_artifact.name}.{file_extension}"
            self._log.debug(f"start to download artifact to {artifact_path}")
            # Request before opening the output file, so a failed request
            # doesn't leave an empty artifact file behind.
            response = requests.get(
                download_url, auth=("", personal_access_token), timeout=timeout
            )
            # Fail early on auth/permission errors; otherwise the HTTP
            # error page would silently be written out as artifact content.
            response.raise_for_status()
            with open(artifact_path, "wb") as download_file:
                download_file.write(response.content)
            self._log.debug(f"downloaded artifact to {artifact_path}")
            if artifact.extract:
                source_path = self.extract(artifact_path)
                artifacts_path.append(Path(source_path))
            else:
                artifacts_path.append(artifact_path)
        return artifacts_path

    def extract(self, artifact_path: Path) -> str:
        """Extract a downloaded ``.zip`` artifact next to itself.

        Returns the path of the extracted directory (the archive path
        without its suffix).

        Raises:
            ValueError: for any non-zip archive. (The original code hit an
                UnboundLocalError in this case because ``source_path`` was
                only assigned inside the zip branch.)
        """
        file_extension = artifact_path.suffix
        if file_extension != ".zip":
            raise ValueError(
                f"unsupported artifact archive format: {file_extension}"
            )
        with zipfile.ZipFile(str(artifact_path), "r") as zip_ref:
            zip_ref.extractall(str(artifact_path.parent))
        source_path = os.path.splitext(str(artifact_path))[0]
        self._log.info(f"Artifact extracted to {str(source_path)}")
        return source_path

    def _resolve_build_id(
        self, connection: Connection, project_name: str, pipeline_name: str
    ) -> int:
        """Resolve a pipeline name to the id of its first succeeded and
        completed run, as returned by the service."""
        pipeline_client = connection.clients.get_pipelines_client()
        pipelines = pipeline_client.list_pipelines(project_name)
        pipeline = next(
            (x for x in pipelines if x.name == pipeline_name), None
        )
        assert_that(pipeline).described_as(
            (
                f"cannot found pipeline {pipeline_name} in project {project_name}, "
                "please double check the names"
            )
        ).is_not_none()
        assert pipeline is not None, "pipeline cannot be None"
        pipeline_runs = pipeline_client.list_runs(
            pipeline_id=pipeline.id, project=project_name
        )
        assert_that(len(pipeline_runs)).described_as(
            f"no runs found for pipeline {pipeline_name}"
        ).is_not_zero()
        completed_runs = [
            run
            for run in pipeline_runs
            if run.result == "succeeded" and run.state == "completed"
        ]
        assert_that(len(completed_runs)).described_as(
            f"no succeeded and completed run found for pipeline {pipeline_name}"
        ).is_not_zero()
        # NOTE(review): relies on the service's default run ordering to
        # pick the run — confirm list_runs returns newest first.
        return completed_runs[0].id

Просмотреть файл

@ -110,6 +110,7 @@ SECURITY_PROFILE_CVM = "cvm"
PLATFORM = "platform"
# platform type names used in runbooks
PLATFORM_READY = "ready"
PLATFORM_BAREMETAL = "baremetal"
PLATFORM_MOCK = "mock"
TESTCASE = "testcase"

Просмотреть файл

@ -101,6 +101,7 @@ def pylint(session: nox.Session) -> None:
*NOX_DEPENDENCIES,
*OPTIONAL_DEPENDENCIES["aws"],
*OPTIONAL_DEPENDENCIES["azure"],
*OPTIONAL_DEPENDENCIES["baremetal"],
*OPTIONAL_DEPENDENCIES["libvirt"],
*OPTIONAL_DEPENDENCIES["pylint"],
*OPTIONAL_DEPENDENCIES["typing"],

Просмотреть файл

@ -94,6 +94,12 @@ libvirt = [
"pycdlib ~= 1.12.0; platform_system != 'Windows'",
]
# optional dependencies of the baremetal orchestrator
baremetal = [
    "azure-devops ~= 7.1.0b3",
    "pysmb ~= 1.2.9.1",
    "redfish ~= 3.2.1",
]
mypy = [
"mypy == 0.942",
]