Add CH performance metric tests (#2276)

* kata conformance automation

* Flake8 formatting changes

* isort fix

* changes for CH perf metric test

* change for making azure package optional in poetry.lock and removing unnecessary lisa tools from kata conf test

* formatting with make fix

* PR review ==> remove CH perf metric changes, Go tool version changes, Path changes in shell.py, log changes in testsuite, made azure package optional

* CH Perf metrics testcase integration

* remove kata related changes to separate the CH perf PR

* remove git related changes to separate the CH perf PR

* PR review changes : Added Error in subtest, created different function to run perf-metrics test

* flake8 and isort formatting

* PR Review change suggested by Anirudh, removal of unwanted logging, adding regex string for referral, removed unwanted comment

* PR Review suggestions by Anirudh : Dataclass implementation, report file to logpath

* removal of unwanted log

* log path change for report file

* PR Review by Anirudh : nit changes

* PR Review by Chi: Pushed notification logic to single loop in _create_perf_metric_report function

* flake8 isort fix

* Change for quick send of test msg result and removal of dumping result to json file

* Changes to remove unused import,white spaces and BLK100

* flak8-isort change : add arg's datatype for function

* PR Review by Chi : return code handling change, log level change in case of exception to info

* removal of dataclass CHPerfMetricTestResult

* Changes for regex to get metrics

* Removal of result dict and using variable

* BLK100 Black format changes

* addition of None check in regex search
This commit is contained in:
Smit Gardhariya 2022-10-12 16:07:38 +05:30 коммит произвёл GitHub
Родитель 82c04c6d6e
Коммит 3be710535f
Не найден ключ, соответствующий данной подписи
Идентификатор ключа GPG: 4AEE18F83AFDEB23
2 изменённых файлов: 127 добавлений и 6 удалений

Просмотреть файл

@ -96,6 +96,25 @@ class CloudHypervisorTestSuite(TestSuite):
result, environment, "integration-live-migration", hypervisor
)
@TestCaseMetadata(
    description="""
    Runs cloud-hypervisor performance metrics tests.
    """,
    priority=3,
)
def verify_cloud_hypervisor_performance_metrics_tests(
    self,
    log: Logger,
    node: Node,
    environment: Environment,
    log_path: Path,
    result: TestResult,
) -> None:
    # Delegate to the CloudHypervisorTests tool, which runs each metrics
    # testcase individually and reports subtest results.
    node.tools[CloudHypervisorTests].run_metrics_tests(
        result, environment, self._get_hypervisor_param(node), log_path
    )
def _ensure_virtualization_enabled(self, node: Node) -> None:
virtualization_enabled = node.tools[Lscpu].is_virtualization_enabled()
mshv_exists = node.tools[Ls].path_exists(path="/dev/mshv", sudo=True)

Просмотреть файл

@ -1,8 +1,9 @@
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
import json
import re
from dataclasses import dataclass
from pathlib import PurePath
from pathlib import Path, PurePath
from typing import Any, List, Optional, Type
from assertpy.assertpy import assert_that
@ -49,6 +50,7 @@ class CloudHypervisorTests(Tool):
hypervisor: str,
skip: Optional[List[str]] = None,
) -> None:
if skip is not None:
skip_args = " ".join(map(lambda t: f"--skip {t}", skip))
else:
@ -79,6 +81,55 @@ class CloudHypervisorTests(Tool):
assert_that(failures, f"Unexpected failures: {failures}").is_empty()
def run_metrics_tests(
    self,
    test_result: TestResult,
    environment: Environment,
    hypervisor: str,
    log_path: Path,
    skip: Optional[List[str]] = None,
) -> None:
    """
    Run every cloud-hypervisor performance metrics test one at a time and
    publish each outcome as a subtest message.

    test_result: parent test result the subtest messages are attached to.
    environment: environment the subtest messages are reported against.
    hypervisor: hypervisor backend under test (e.g. "kvm").
    log_path: directory under which the metrics report file path is placed.
    skip: optional list of testcase names to exclude from the run.
    """
    self.per_mtr_report_file = log_path.joinpath("perf_metrics.json")
    perf_metrics_tests = self._list_perf_metrics_tests(hypervisor=hypervisor)
    # Fix: the `skip` parameter was previously accepted but silently ignored.
    if skip:
        perf_metrics_tests = [t for t in perf_metrics_tests if t not in skip]
    for testcase in perf_metrics_tests:
        status: TestStatus = TestStatus.QUEUED
        metrics: str = ""
        trace: str = ""
        try:
            result = self.run(
                f"tests --hypervisor {hypervisor} --metrics -- --"
                f" --test-filter {testcase}",
                timeout=self.TIME_OUT,
                force_run=True,
                cwd=self.repo_root,
                no_info_log=False,  # print out result of each test
                shell=True,
            )
            # Normalize line endings and drop tabs before parsing.
            output = result.stdout.replace("\r\n", "\n").replace("\t", "")
            if result.exit_code == 0:
                status = TestStatus.PASSED
                # Fix: parse the normalized output (previously the raw
                # result.stdout was parsed, keeping \r and \t noise).
                metrics = self._process_perf_metric_test_result(output)
            else:
                status = TestStatus.FAILED
                trace = output
        except Exception as e:
            # A failure in one testcase must not stop the remaining ones;
            # record the exception as the subtest's trace instead.
            self._log.info(f"Testcase failed, testcase name: {testcase}")
            status = TestStatus.FAILED
            trace = str(e)
        msg = metrics if status == TestStatus.PASSED else trace
        self._send_subtest_msg(
            test_id=test_result.id_,
            environment=environment,
            test_name=testcase,
            test_status=status,
            test_message=msg,
        )
def _initialize(self, *args: Any, **kwargs: Any) -> None:
    # Clone/look up the tool under the shared (global) tool path so the
    # cloud-hypervisor repository can be reused across test cases.
    tool_path = self.get_tool_path(use_global=True)
    self.repo_root = tool_path / "cloud-hypervisor"
@ -148,13 +199,64 @@ class CloudHypervisorTests(Tool):
environment: Environment,
test_name: str,
test_status: TestStatus,
test_message: str = "",
) -> None:
subtest_msg = create_test_result_message(
SubTestMessage,
test_id,
environment,
test_name,
test_status,
SubTestMessage, test_id, environment, test_name, test_status, test_message
)
notifier.notify(subtest_msg)
def _list_perf_metrics_tests(self, hypervisor: str = "kvm") -> List[str]:
    """
    Query the cloud-hypervisor test runner for the available performance
    metrics testcases and return their names.
    """
    result = self.run(
        f"tests --hypervisor {hypervisor} --metrics -- -- --list-tests",
        timeout=self.TIME_OUT,
        force_run=True,
        cwd=self.repo_root,
        shell=True,
        expected_exit_code=0,
    )
    # Each testcase is listed like:
    #   "boot_time_ms" (test_timeout=2s,test_iterations=10)
    # so capture the quoted name that precedes the opening parenthesis.
    tests_list = re.findall(r'"(.*)" \(', result.stdout)
    self._log.debug(f"Testcases found: {tests_list}")
    return tests_list
def _process_perf_metric_test_result(self, output: str) -> str:
# Sample Output
# "git_human_readable": "v27.0",
# "git_revision": "2ba6a9bfcfd79629aecf77504fa554ab821d138e",
# "git_commit_date": "Thu Sep 29 17:56:21 2022 +0100",
# "date": "Wed Oct 12 03:51:38 UTC 2022",
# "results": [
# {
# "name": "block_multi_queue_read_MiBps",
# "mean": 158.64382311768824,
# "std_dev": 7.685502103050337,
# "max": 173.9743994350565,
# "min": 154.10646435356466
# }
# ]
# }
# real 1m39.856s
# user 0m6.376s
# sys 2m32.973s
# + RES=0
# + exit 0
output = output.replace("\n", "")
regex = '\\"results\\"\\: (.*?)\\]'
result = re.search(regex, output)
if result:
return result.group(0)
return ""