RdmaInstaller: refactor rdma-core dependency and source install (#3457)

* Ninja: add pyelftools dependency

* Installer: make only one dependency install round trip

* RdmaCore: make rdma-core source installer

Implement rdma-core source and package manager build
using Installer class. This will be handy when
implementing the 32bit dpdk/rdma-core test.

Debian kernel version compare

Prefer kernel version over codename.

* Installer: undo lts/latest changes

* DPDK+RdmaCore: use list for ubuntu versions

* RdmaInstaller: prefer os package check for version

* DpdkInstaller: pretty huge bugfix

* Dpdk: fix build_path -> asset_path

* Dpdk: ring_ping allow re-initialization to force source build

* DpdkInstall: stop_on_match for uninstall too

* Revert "Dpdk: ring_ping allow re-initialization to force source build"

This reverts commit 28638da236.

* Dpdk: skip if package manager is used for ring_ping

* Use create instead of get to avoid skipping init

* Dpdk/Rdma Installer: ensure installer runs and asset_path is set

* Git: delete temp branch before creating if it exists

* Dpdk: add reattempt build with modified pythonpath

* Dpdk: fix sudo python issue for meson
mcgov authored 2024-10-14 20:37:29 -07:00, committed by GitHub
Parent 285f1aa892
Commit e9df2a6c06
No key found matching this signature
GPG key ID: B5690EEEBB952194
8 changed files with 429 additions and 170 deletions


@@ -103,15 +103,23 @@ class Git(Tool):
         return full_path
 
     def checkout(
-        self, ref: str, cwd: pathlib.PurePath, checkout_branch: str = ""
+        self,
+        ref: str,
+        cwd: pathlib.PurePath,
+        checkout_branch: str = "",
     ) -> None:
+        delete_temp_branch = False
         if not checkout_branch:
             # create a temp branch to checkout tag or commit.
             checkout_branch = f"{constants.RUN_ID}"
+            # check if this name is already in use
+            branch_before_checkout = self.get_current_branch(cwd=cwd)
+            if branch_before_checkout == checkout_branch:
+                delete_temp_branch = True
 
         # mark directory safe
         self._mark_safe(cwd)
-        branch_before_checkout = self.get_current_branch(cwd=cwd)
         # force run to make sure checkout among branches correctly.
         result = self.run(
             f"checkout {ref}",
@@ -120,7 +128,20 @@ class Git(Tool):
             no_info_log=True,
             no_error_log=True,
         )
+        # delete old temp branch before checking out new one
+        if delete_temp_branch:
+            self.run(
+                f"branch -D {branch_before_checkout}",
+                force_run=True,
+                cwd=cwd,
+                no_info_log=True,
+                no_error_log=True,
+            )
+            result.assert_exit_code(
+                message=f"failed to delete old temp branch. {result.stdout}"
+            )
+        # create temp branch
         result = self.run(
             f"checkout -b {checkout_branch}",
             force_run=True,
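
Note on the checkout change above: the temp branch name is derived from the run id, so a second checkout within the same run previously failed at "checkout -b" because the branch already existed. A rough usage sketch with assumed values (the repo URL and tags are examples; the clone/checkout signatures are the ones used in this diff):

# sketch with assumed values: two checkouts of different refs in one run
git = node.tools[Git]
repo_path = git.clone(
    "https://github.com/linux-rdma/rdma-core.git",  # example repo
    cwd=node.get_working_path(),
)
git.checkout("v50.1", cwd=repo_path)  # creates the RUN_ID temp branch
git.checkout("v50.0", cwd=repo_path)  # deletes the stale RUN_ID branch first, then recreates it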


@@ -39,7 +39,15 @@ class Meson(Tool):
             self.node.tools[Ln].create_link(
                 f"/home/{username}/.local/bin/meson", "/usr/bin/meson", force=True
             )
+            # ensure sudo has access as well
+            self.node.execute(
+                "pip3 install meson",
+                sudo=True,
+                shell=True,
+                no_debug_log=True,
+                no_info_log=True,
+                no_error_log=True,
+            )
         return self._check_exists()
 
     def setup(self, args: str, cwd: PurePath, build_dir: str = "build") -> PurePath:


@@ -9,7 +9,7 @@ from lisa.executable import Tool
 from lisa.operating_system import Posix
 from lisa.tools.gcc import Gcc
 from lisa.tools.git import Git
-from lisa.tools.python import Python
+from lisa.tools.python import Pip, Python
 
 
 class Ninja(Tool):
@@ -45,6 +45,7 @@ class Ninja(Tool):
             self._ninja_url,
             cwd=node.working_path,
         )
+        node.tools[Pip].install_packages("pyelftools")
         node.execute(
             "./configure.py --bootstrap",
             cwd=node.get_pure_path(f"{str(ninja_path)}"),


@@ -1,7 +1,6 @@
 # Copyright (c) Microsoft Corporation.
 # Licensed under the MIT license.
 
-from datetime import datetime
 from pathlib import PurePath
 from typing import Any, Callable, Dict, List, Optional, Sequence, Type, Union
@@ -60,11 +59,14 @@ class DependencyInstaller:
         )
         # find the match for an OS, install the packages.
         # stop on list end or if exclusive_match parameter is true.
+        packages: List[Union[str, Tool, Type[Tool]]] = []
         for requirement in self.requirements:
             if requirement.matcher(os) and requirement.packages:
-                os.install_packages(requirement.packages, extra_args=extra_args)
+                packages += requirement.packages
                 if requirement.stop_on_match:
-                    return
+                    break
+        os.install_packages(packages=packages, extra_args=extra_args)
 
         # NOTE: It is up to the caller to raise an exception on an invalid OS
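
Note on the DependencyInstaller change above: packages from every matching requirement are now collected first and installed with a single install_packages call, so a node makes one package-manager round trip instead of one per matched entry. A stand-alone sketch of the same pattern ("os" is any object exposing install_packages; the package names are placeholders):

# stand-alone sketch of the batching shown above; package names are placeholders
requirements = [
    OsPackageDependencies(matcher=lambda x: True, packages=["pkg-a", "pkg-b"]),
    OsPackageDependencies(matcher=lambda x: True, packages=["pkg-c"], stop_on_match=True),
    OsPackageDependencies(matcher=lambda x: True, packages=["never-collected"]),
]
packages = []
for requirement in requirements:
    if requirement.matcher(os) and requirement.packages:
        packages += requirement.packages
        if requirement.stop_on_match:
            break
os.install_packages(packages=packages)  # one round trip: pkg-a, pkg-b, pkg-c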
@@ -95,13 +97,13 @@ class GitDownloader(Downloader):
         # NOTE: fail on exists is set to True.
         # The expectation is that the parent Installer class should
         # remove any lingering installations
-        self._asset_path = self._node.tools[Git].clone(
+        self.asset_path = self._node.tools[Git].clone(
             self._git_repo,
             cwd=self._node.get_working_path(),
             ref=self._git_ref,
-            fail_on_exists=True,
+            fail_on_exists=False,
         )
-        return self._asset_path
+        return self.asset_path
 
 
 # parent class for tarball source installations
@@ -137,7 +139,6 @@ class TarDownloader(Downloader):
             self._tar_url,
             file_path=str(work_path),
             overwrite=False,
-            force_run=True,
         )
         remote_path = node.get_pure_path(tarfile)
         self.tar_filename = remote_path.name
@@ -149,7 +150,7 @@ class TarDownloader(Downloader):
             node_path=remote_path,
         )
         # create tarfile dest dir
-        self._asset_path = work_path.joinpath(
+        self.asset_path = work_path.joinpath(
             self.tar_filename[: -(len(tarfile_suffix))]
         )
         # unpack into the dest dir
@@ -159,7 +160,7 @@ class TarDownloader(Downloader):
             dest_dir=str(work_path),
             gzip=True,
         )
-        return self._asset_path
+        return self.asset_path
 
 
 class Installer:
@@ -169,8 +170,10 @@ class Installer:
 
     # setup the node before starting
     # ex: updating the kernel, enabling features, checking drivers, etc.
+    # First we download the assets to ensure asset_path is set
+    # even if we end up skipping re-installation
     def _setup_node(self) -> None:
-        raise NotImplementedError(f"_setup_node {self._err_msg}")
+        self._download_assets()
 
     # check if the package is already installed:
     # Is the package installed from source? Or from the package manager?
@@ -181,13 +184,13 @@ class Installer:
     # setup the installation (install Ninja, Meson, etc)
     def _download_assets(self) -> None:
         if self._downloader:
-            self._asset_path = self._downloader.download()
+            self.asset_path = self._downloader.download()
         else:
             self._node.log.debug("No downloader assigned to installer.")
 
     # do the build and installation
     def _install(self) -> None:
-        self._download_assets()
+        ...
 
     # remove an installation
     def _uninstall(self) -> None:
@@ -206,7 +209,8 @@ class Installer:
 
     def _should_install(self, required_version: Optional[VersionInfo] = None) -> bool:
         return (not self._check_if_installed()) or (
-            required_version is None and required_version > self.get_installed_version()
+            required_version is not None
+            and required_version > self.get_installed_version()
         )
 
     # run the defined setup and installation steps.
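
Note on the _should_install fix above: with the old "required_version is None and required_version > ...", passing a required version made the right-hand side False, so a requested upgrade could never trigger a reinstall; the corrected check only compares when a version was actually requested. A stand-alone sketch of the predicate (not the class method itself), using semver VersionInfo as elsewhere in this module:

# stand-alone sketch of the corrected condition
from typing import Optional

from semver import VersionInfo


def should_install(installed: Optional[VersionInfo], required: Optional[VersionInfo]) -> bool:
    # install when nothing is installed, or when a newer version is explicitly required
    return installed is None or (required is not None and required > installed)


print(should_install(VersionInfo.parse("22.11.0"), VersionInfo.parse("23.11.0")))  # True
print(should_install(VersionInfo.parse("23.11.0"), None))  # False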
@@ -247,6 +251,8 @@ class PackageManagerInstall(Installer):
         for os_package_check in self._os_dependencies.requirements:
             if os_package_check.matcher(self._os) and os_package_check.packages:
                 self._os.uninstall_packages(os_package_check.packages)
+                if os_package_check.stop_on_match:
+                    break
 
     # verify packages on the node have been installed by
     # the package manager
@@ -260,55 +266,40 @@ class PackageManagerInstall(Installer):
                 for pkg in os_package_check.packages:
                     if not self._os.package_exists(pkg):
                         return False
+                if os_package_check.stop_on_match:
+                    break
         return True
 
+    # installing dependencies is the installation in this case, so just return
+    def _install(self) -> None:
+        return
+
 
 def force_dpdk_default_source(variables: Dict[str, Any]) -> None:
     if not variables.get("dpdk_source", None):
         variables["dpdk_source"] = DPDK_STABLE_GIT_REPO
 
 
-# rough check for ubuntu supported versions.
-# assumes:
-# - canonical convention of YEAR.MONTH for major versions
-# - canoical release cycle of EVEN_YEAR.04 for lts versions.
-# - 4 year support cycle. 6 year for ESM
-# get the age of the distro, if negative or 0, release is new.
-# if > 6, distro is out of support
-def is_ubuntu_lts_version(distro: Ubuntu) -> bool:
-    # asserts if not ubuntu OS object
-    version_info = distro.information.version
-    distro_age = _get_ubuntu_distro_age(distro)
-    is_even_year = (version_info.major % 2) == 0
-    is_april_release = version_info.minor == 4
-    is_within_support_window = distro_age <= 6
-    return is_even_year and is_april_release and is_within_support_window
-
-
-# see https://ubuntu.com/about/release-cycle
+_UBUNTU_LTS_VERSIONS = ["24.4.0", "22.4.0", "20.4.0", "18.4.0"]
+
+
 def is_ubuntu_latest_or_prerelease(distro: Ubuntu) -> bool:
-    distro_age = _get_ubuntu_distro_age(distro)
-    return distro_age <= 2
-
-
-def _get_ubuntu_distro_age(distro: Ubuntu) -> int:
-    version_info = distro.information.version
-    # check release is within esm window
-    year_string = str(datetime.today().year)
-    assert_that(len(year_string)).described_as(
-        "Package bug: The year received from datetime module is an "
-        "unexpected size. This indicates a broken package or incorrect "
-        "date in this computer."
-    ).is_greater_than_or_equal_to(4)
-    # TODO: handle the century rollover edge case in 2099
-    current_year = int(year_string[-2:])
-    release_year = int(version_info.major)
-    # 23-18 == 5
-    # long term support and extended security updates for ~6 years
-    return current_year - release_year
+    return bool(distro.information.version >= max(_UBUNTU_LTS_VERSIONS))
+
+
+# see https://ubuntu.com/about/release-cycle
+def is_ubuntu_lts_version(distro: Ubuntu) -> bool:
+    major = str(distro.information.version.major)
+    minor = str(distro.information.version.minor)
+    # check for major+minor version match
+    return any(
+        [
+            major == x.split(".", maxsplit=1)[0] and minor == x.split(".")[1]
+            for x in _UBUNTU_LTS_VERSIONS
+        ]
+    )
+
+
+# check if it's a lts release outside the initial 2 year lts window
+def ubuntu_needs_backports(os: Ubuntu) -> bool:
+    return not is_ubuntu_latest_or_prerelease(os) and is_ubuntu_lts_version(os)
 
 
 def check_dpdk_support(node: Node) -> None:
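
Note on the Ubuntu helpers above: the LTS check is now a plain major/minor string match against _UBUNTU_LTS_VERSIONS instead of the old release-age arithmetic. For illustration (stand-alone, reusing the list from this diff):

# Ubuntu 22.04 reports major=22, minor=4, so it matches the "22.4.0" entry;
# an interim release such as 23.10 matches nothing.
_UBUNTU_LTS_VERSIONS = ["24.4.0", "22.4.0", "20.4.0", "18.4.0"]
major, minor = "22", "4"
print(
    any(
        major == x.split(".", maxsplit=1)[0] and minor == x.split(".")[1]
        for x in _UBUNTU_LTS_VERSIONS
    )
)  # True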
@@ -382,3 +373,48 @@ def unsupported_os_thrower(os: Posix) -> bool:
         os,
         message=("Installer did not define dependencies for this os."),
     )
+
+
+def get_debian_backport_repo_args(os: Debian) -> List[str]:
+    # ex: 'bionic-backports' or 'buster-backports'
+    # these backport repos are available for the older OS's
+    # and include backported fixes which need to be opted into.
+    # So filter out recent OS's and
+    # add the backports repo for older ones, if it should be available.
+    if not isinstance(os, Debian):
+        return []
+    # don't enable backport args for releases which don't need/have them.
+    if isinstance(os, Ubuntu) and not ubuntu_needs_backports(os):
+        return []
+    repos = os.get_repositories()
+    backport_repo = f"{os.information.codename}-backports"
+    if any([backport_repo in repo.name for repo in repos]):
+        return [f"-t {backport_repo}"]
+    return []
+
+
+# NOTE: mana_ib was added in 6.2 and backported to 5.15
+# this ends up lining up with kernels that need to be updated before
+# starting our DPDK tests. This function is not meant for general use
+# outside of the DPDK suite.
+def update_kernel_from_repo(node: Node) -> None:
+    assert isinstance(
+        node.os, (Debian, Fedora, Suse)
+    ), f"DPDK test does not support OS type: {type(node.os)}"
+    if (
+        isinstance(node.os, Debian)
+        and node.os.get_kernel_information().version < "6.5.0"
+    ):
+        package = "linux-azure"
+    elif (
+        isinstance(node.os, (Fedora, Suse))
+        and node.os.get_kernel_information().version < "5.15.0"
+    ):
+        package = "kernel"
+    else:
+        return
+    if node.os.is_package_in_repo(package):
+        node.os.install_packages(package)
+        node.reboot()
+    else:
+        node.log.debug(f"Kernel update package '{package}' was not found.")


@@ -28,6 +28,7 @@ from lisa.tools.hugepages import HugePageSize
 from lisa.util.constants import SIGINT
 from microsoft.testsuites.dpdk.common import (
     DPDK_STABLE_GIT_REPO,
+    PackageManagerInstall,
     force_dpdk_default_source,
 )
 from microsoft.testsuites.dpdk.dpdknffgo import DpdkNffGo
@@ -527,6 +528,13 @@ class Dpdk(TestSuite):
         except (NotEnoughMemoryException, UnsupportedOperationException) as err:
             raise SkippedException(err)
         testpmd = test_kit.testpmd
+        if isinstance(testpmd.installer, PackageManagerInstall):
+            # The Testpmd tool doesn't get re-initialized
+            # even if you invoke it with new arguments.
+            raise SkippedException(
+                "DPDK ring_ping test is not implemented for "
+                " package manager installation."
+            )
 
         # grab a nic and run testpmd
         git = node.tools[Git]
@@ -534,7 +542,7 @@ class Dpdk(TestSuite):
         echo = node.tools[Echo]
         rping_build_env_vars = [
             "export RTE_TARGET=build",
-            f"export RTE_SDK={str(testpmd.dpdk_path)}",
+            f"export RTE_SDK={str(testpmd.installer.asset_path)}",
         ]
         echo.write_to_file(
             ";".join(rping_build_env_vars), node.get_pure_path("~/.bashrc"), append=True


@@ -3,7 +3,7 @@
 
 import re
 from pathlib import PurePath, PurePosixPath
-from typing import Any, List, Tuple, Type, Union
+from typing import Any, List, Tuple, Type
 
 from assertpy import assert_that, fail
 from semver import VersionInfo
@@ -18,7 +18,6 @@ from lisa.tools import (
     Kill,
     Lscpu,
     Lspci,
-    Make,
     Meson,
     Modprobe,
     Ninja,
@@ -26,7 +25,6 @@ from lisa.tools import (
     Pip,
     Pkgconfig,
     Python,
-    Tar,
     Timeout,
     Wget,
 )
@@ -40,8 +38,7 @@ from microsoft.testsuites.dpdk.common import (
     OsPackageDependencies,
     PackageManagerInstall,
     TarDownloader,
-    is_ubuntu_latest_or_prerelease,
-    is_ubuntu_lts_version,
+    get_debian_backport_repo_args,
     is_url_for_git_repo,
     is_url_for_tarball,
     unsupported_os_thrower,
@@ -53,6 +50,15 @@ PACKAGE_MANAGER_SOURCE = "package_manager"
 # declare package dependencies for package manager DPDK installation
 DPDK_PACKAGE_MANAGER_PACKAGES = DependencyInstaller(
     requirements=[
+        # install linux-modules-extra-azure if it's available for mana_ib
+        # older debian kernels won't have mana_ib packaged,
+        # so skip the check on those kernels.
+        OsPackageDependencies(
+            matcher=lambda x: isinstance(x, Debian)
+            and bool(x.get_kernel_information().version >= "5.15.0")
+            and x.is_package_in_repo("linux-modules-extra-azure"),
+            packages=["linux-modules-extra-azure"],
+        ),
         OsPackageDependencies(
             matcher=lambda x: isinstance(x, Debian),
             packages=["dpdk", "dpdk-dev"],
@@ -101,6 +107,15 @@ DPDK_SOURCE_INSTALL_PACKAGES = DependencyInstaller(
             ],
             stop_on_match=True,
         ),
+        # install linux-modules-extra-azure if it's available for mana_ib
+        # older debian kernels won't have mana_ib packaged,
+        # so skip the check on those kernels.
+        OsPackageDependencies(
+            matcher=lambda x: isinstance(x, Debian)
+            and bool(x.get_kernel_information().version >= "5.15.0")
+            and x.is_package_in_repo("linux-modules-extra-azure"),
+            packages=["linux-modules-extra-azure"],
+        ),
         OsPackageDependencies(
             matcher=lambda x: isinstance(x, Debian),
             packages=[
@@ -110,7 +125,6 @@ DPDK_SOURCE_INSTALL_PACKAGES = DependencyInstaller(
                 "python3-pyelftools",
                 "libelf-dev",
                 "pkg-config",
-                "linux-modules-extra-azure",
             ],
             stop_on_match=True,
         ),
@@ -144,30 +158,18 @@ DPDK_SOURCE_INSTALL_PACKAGES = DependencyInstaller(
 )
 
 
-def get_debian_backport_repo_args(os: Debian) -> List[str]:
-    if not isinstance(os, Debian):
-        return []
-    if isinstance(os, Ubuntu) and (
-        is_ubuntu_latest_or_prerelease(os) or not is_ubuntu_lts_version(os)
-    ):
-        return []
-    repos = os.get_repositories()
-    backport_repo = f"{os.information.codename}-backports"
-    if any([backport_repo in repo.name for repo in repos]):
-        return [f"-t {backport_repo}"]
-    return []
-
-
 class DpdkPackageManagerInstall(PackageManagerInstall):
     def _setup_node(self) -> None:
         if isinstance(self._os, Debian):
             self._package_manager_extra_args = get_debian_backport_repo_args(self._os)
-            if self._os.information.version < "22.4.0":
-                self._os.update_packages("linux-azure")
-                self._node.reboot()
         elif isinstance(self._os, Fedora):
             self._os.install_epel()
+        # super setup node last in this case, since we must set
+        # repo args before download/install
+        super()._setup_node()
 
     def get_installed_version(self) -> VersionInfo:
         package_name = (
             "dpdk22" if float(self._os.information.release) == 15.5 else "dpdk"
@@ -205,9 +207,10 @@ class DpdkSourceInstall(Installer):
         return False
 
     def _setup_node(self) -> None:
+        super()._setup_node()
         if isinstance(self._os, Debian):
             self._package_manager_extra_args = get_debian_backport_repo_args(self._os)
-            if self._os.information.version < "22.4.0":
+            if isinstance(self._os, Ubuntu) and self._os.information.version < "22.4.0":
                 self._os.update_packages("linux-azure")
                 self._node.reboot()
         # install( Tool ) doesn't seem to install the tool until it's used :\
@@ -223,21 +226,20 @@ class DpdkSourceInstall(Installer):
         self._node.tools[Ninja].run(
             "uninstall", shell=True, sudo=True, cwd=self.dpdk_build_path
         )
-        source_path = str(self._asset_path)
         working_path = str(self._node.get_working_path())
-        assert_that(str(source_path)).described_as(
+        assert_that(str(self.dpdk_build_path)).described_as(
             "DPDK Installer source path was empty during attempted cleanup!"
         ).is_not_empty()
-        assert_that(str(source_path)).described_as(
+        assert_that(str(self.dpdk_build_path)).described_as(
             "DPDK Installer source path was set to root dir "
             "'/' during attempted cleanup!"
         ).is_not_equal_to("/")
-        assert_that(str(source_path)).described_as(
-            f"DPDK Installer source path {source_path} was set to "
+        assert_that(str(self.dpdk_build_path)).described_as(
+            f"DPDK Installer source path {self.dpdk_build_path} was set to "
             f"working path '{working_path}' during attempted cleanup!"
         ).is_not_equal_to(working_path)
-        # remove source code directory
-        self._node.execute(f"rm -rf {str(source_path)}", shell=True)
+        # remove build path only since we may want the repo again.
+        self._node.execute(f"rm -rf {str(self.dpdk_build_path)}", shell=True)
 
     def get_installed_version(self) -> VersionInfo:
         return self._node.tools[Pkgconfig].get_package_version(
@@ -254,10 +256,11 @@ class DpdkSourceInstall(Installer):
         # save the pythonpath for later
         python_path = node.tools[Python].get_python_path()
         self.dpdk_build_path = node.tools[Meson].setup(
-            args=sample_apps, build_dir="build", cwd=self._asset_path
+            args=sample_apps, build_dir="build", cwd=self.asset_path
         )
         node.tools[Ninja].run(
             cwd=self.dpdk_build_path,
+            shell=True,
             timeout=1800,
             expected_exit_code=0,
             expected_exit_code_failure_message=(
@@ -273,11 +276,13 @@ class DpdkSourceInstall(Installer):
             "install",
             cwd=self.dpdk_build_path,
             sudo=True,
+            shell=True,
             expected_exit_code=0,
             expected_exit_code_failure_message=(
                 "ninja install failed for dpdk binaries."
             ),
-            update_envs={"PYTHONPATH": f"$PYTHONPATH:{python_path}"},
+            update_envs={"PYTHONPATH": f"{python_path}:$PYTHONPATH"},
+            force_run=True,
         )
         node.execute(
             "ldconfig",
@@ -305,10 +310,10 @@ class DpdkGitDownloader(GitDownloader):
         if not self._git_ref:
             git = self._node.tools[Git]
             self._git_ref = git.get_tag(
-                self._asset_path, filter_=r"^v.*"  # starts w 'v'
+                self.asset_path, filter_=r"^v.*"  # starts w 'v'
             )
-            git.checkout(self._git_ref, cwd=self._asset_path)
-        return self._asset_path
+            git.checkout(self._git_ref, cwd=self.asset_path)
+        return self.asset_path
 
 
 class DpdkTestpmd(Tool):
@@ -403,22 +408,6 @@ class DpdkTestpmd(Tool):
     }
 
     _source_build_dest_dir = "/usr/local/bin"
 
-    def get_rdma_core_package_name(self) -> str:
-        distro = self.node.os
-        package = ""
-        # check if rdma-core is installed already...
-        if self.node.tools[Pkgconfig].package_info_exists("libibuverbs"):
-            return package
-        if isinstance(distro, Debian):
-            package = "rdma-core ibverbs-providers libibverbs-dev"
-        elif isinstance(distro, Suse):
-            package = "rdma-core-devel librdmacm1"
-        elif isinstance(distro, Fedora):
-            package = "librdmacm-devel"
-        else:
-            fail("Invalid OS for rdma-core source installation.")
-        return package
-
     @property
     def can_install(self) -> bool:
         for _os in [Debian, Fedora, Suse]:
@@ -734,12 +723,6 @@ class DpdkTestpmd(Tool):
     def get_mean_rx_pps_sriov_rescind(self) -> Tuple[int, int, int]:
         return self._get_pps_sriov_rescind(self._rx_pps_key)
 
-    def add_sample_apps_to_build_list(self, apps: Union[List[str], None]) -> None:
-        if apps:
-            self._sample_apps_to_build = apps
-        else:
-            self._sample_apps_to_build = []
-
     def get_example_app_path(self, app_name: str) -> PurePath:
         if isinstance(self.installer, DpdkSourceInstall):
             return self.installer.dpdk_build_path.joinpath("examples").joinpath(
@@ -839,54 +822,6 @@ class DpdkTestpmd(Tool):
             f"empty or all zeroes for dpdktestpmd.{rx_or_tx.lower()}_pps_data."
         ).is_true()
 
-    def _install_upstream_rdma_core_for_mana(self) -> None:
-        node = self.node
-        wget = node.tools[Wget]
-        make = node.tools[Make]
-        tar = node.tools[Tar]
-        distro = node.os
-        if isinstance(distro, Debian):
-            distro.install_packages(
-                "cmake libudev-dev "
-                "libnl-3-dev libnl-route-3-dev ninja-build pkg-config "
-                "valgrind python3-dev cython3 python3-docutils pandoc "
-                "libssl-dev libelf-dev python3-pip libnuma-dev"
-            )
-        elif isinstance(distro, Fedora):
-            distro.group_install_packages("Development Tools")
-            distro.install_packages(
-                "cmake gcc libudev-devel "
-                "libnl3-devel pkg-config "
-                "valgrind python3-devel python3-docutils "
-                "openssl-devel unzip "
-                "elfutils-devel python3-pip libpcap-devel "
-                "tar wget dos2unix psmisc kernel-devel-$(uname -r) "
-                "librdmacm-devel libmnl-devel kernel-modules-extra numactl-devel "
-                "kernel-headers elfutils-libelf-devel meson ninja-build libbpf-devel "
-            )
-        else:
-            # check occcurs before this function
-            return
-        tar_path = wget.get(
-            url=(
-                "https://github.com/linux-rdma/rdma-core/"
-                "releases/download/v50.1/rdma-core-50.1.tar.gz"
-            ),
-            file_path=str(node.working_path),
-        )
-        tar.extract(tar_path, dest_dir=str(node.working_path), gzip=True, sudo=True)
-        source_path = node.working_path.joinpath("rdma-core-50.1")
-        node.execute(
-            "cmake -DIN_PLACE=0 -DNO_MAN_PAGES=1 -DCMAKE_INSTALL_PREFIX=/usr",
-            shell=True,
-            cwd=source_path,
-            sudo=True,
-        )
-        make.make_install(source_path)
-
     def _install(self) -> bool:
         self._testpmd_output_after_reenable = ""
         self._testpmd_output_before_rescind = ""
@@ -898,14 +833,11 @@ class DpdkTestpmd(Tool):
         if isinstance(node.os, Ubuntu) and node.os.information.codename == "bionic":
             # bionic needs to update to latest first
             node.os.update_packages("")
-        if self.is_mana:
-            if not (isinstance(node.os, Ubuntu) or isinstance(node.os, Fedora)):
-                raise SkippedException("MANA DPDK test is not supported on this OS")
-            # ensure no older dependency is installed
-            node.os.uninstall_packages("rdma-core")
-            self._install_upstream_rdma_core_for_mana()
-        else:
-            node.os.install_packages(self.get_rdma_core_package_name())
+        if self.is_mana and not (
+            isinstance(node.os, Ubuntu) or isinstance(node.os, Fedora)
+        ):
+            raise SkippedException("MANA DPDK test is not supported on this OS")
         self.installer.do_installation()
         self._dpdk_version_info = self.installer.get_installed_version()
         self._load_drivers_for_dpdk()


@@ -48,9 +48,23 @@ from lisa.util.parallel import TaskManager, run_in_parallel, run_in_parallel_async
 from microsoft.testsuites.dpdk.common import (
     AZ_ROUTE_ALL_TRAFFIC,
     DPDK_STABLE_GIT_REPO,
+    Downloader,
+    GitDownloader,
+    Installer,
+    TarDownloader,
     check_dpdk_support,
+    is_url_for_git_repo,
+    is_url_for_tarball,
+    update_kernel_from_repo,
 )
 from microsoft.testsuites.dpdk.dpdktestpmd import PACKAGE_MANAGER_SOURCE, DpdkTestpmd
+from microsoft.testsuites.dpdk.rdmacore import (
+    RDMA_CORE_MANA_DEFAULT_SOURCE,
+    RDMA_CORE_PACKAGE_MANAGER_DEPENDENCIES,
+    RDMA_CORE_SOURCE_DEPENDENCIES,
+    RdmaCorePackageManagerInstall,
+    RdmaCoreSourceInstaller,
+)
 
 
 # DPDK added new flags in 19.11 that some tests rely on for send/recv
@@ -108,6 +122,35 @@ def _set_forced_source_by_distro(node: Node, variables: Dict[str, Any]) -> None:
         variables["dpdk_branch"] = variables.get("dpdk_branch", "v20.11")
 
 
+def get_rdma_core_installer(
+    node: Node, dpdk_source: str, dpdk_branch: str, rdma_source: str, rdma_branch: str
+) -> Installer:
+    # set rdma-core installer type.
+    if rdma_source:
+        if is_url_for_git_repo(rdma_source):
+            # else, if we have a user provided rdma-core source, use it
+            downloader: Downloader = GitDownloader(node, rdma_source, rdma_branch)
+        elif is_url_for_tarball(rdma_branch):
+            downloader = TarDownloader(node, rdma_source)
+        else:
+            # throw on unrecognized rdma core source type
+            raise AssertionError(
+                f"Invalid rdma-core source uri provided: {rdma_source}"
+            )
+    # handle MANA special case, build a default rdma-core with mana provider
+    elif not rdma_source and node.nics.is_mana_device_present():
+        downloader = TarDownloader(node, RDMA_CORE_MANA_DEFAULT_SOURCE)
+    else:
+        # no rdma_source and not mana, just use the package manager
+        return RdmaCorePackageManagerInstall(
+            node, os_dependencies=RDMA_CORE_PACKAGE_MANAGER_DEPENDENCIES
+        )
+    # return the installer with the downloader we've picked
+    return RdmaCoreSourceInstaller(
+        node, os_dependencies=RDMA_CORE_SOURCE_DEPENDENCIES, downloader=downloader
+    )
+
+
 def _ping_all_nodes_in_environment(environment: Environment) -> None:
     # a quick connectivity check before the test.
     # this can help establish routes on some platforms before handing
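
Note on the selector above: a sketch of how it resolves for a few hypothetical runbook inputs (dpdk_source and dpdk_branch are accepted but not used in the branch selection shown here):

# hypothetical inputs
# 1. explicit git source -> RdmaCoreSourceInstaller with a GitDownloader
get_rdma_core_installer(
    node, dpdk_source, dpdk_branch,
    "https://github.com/linux-rdma/rdma-core.git", "v50.1",
)
# 2. empty rdma_source on a MANA VM -> source build of the pinned 50.1 tarball
get_rdma_core_installer(node, dpdk_source, dpdk_branch, "", "")
# 3. empty rdma_source, no MANA device -> RdmaCorePackageManagerInstall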
@@ -245,6 +288,8 @@ def initialize_node_resources(
     dpdk_source = variables.get("dpdk_source", PACKAGE_MANAGER_SOURCE)
     dpdk_branch = variables.get("dpdk_branch", "")
+    rdma_source = variables.get("rdma_source", "")
+    rdma_branch = variables.get("rdma_branch", "")
     force_net_failsafe_pmd = variables.get("dpdk_force_net_failsafe_pmd", False)
     log.info(
         "Dpdk initialize_node_resources running"
@@ -273,15 +318,26 @@ def initialize_node_resources(
     # verify SRIOV is setup as-expected on the node after compat check
     node.nics.check_pci_enabled(pci_enabled=True)
 
+    update_kernel_from_repo(node)
+    rdma_core = get_rdma_core_installer(
+        node, dpdk_source, dpdk_branch, rdma_source, rdma_branch
+    )
+    rdma_core.do_installation()
+
     # create tool, initialize testpmd tool (installs dpdk)
-    testpmd: DpdkTestpmd = node.tools.get(
+    # use create over get to avoid skipping
+    # reinitialization of tool when new arguments are present
+    testpmd: DpdkTestpmd = node.tools.create(
         DpdkTestpmd,
         dpdk_source=dpdk_source,
         dpdk_branch=dpdk_branch,
         sample_apps=sample_apps,
         force_net_failsafe_pmd=force_net_failsafe_pmd,
     )
+    # Tools will skip installation if the binary is present, so
+    # force invoke install. Installer will skip if the correct
+    # *type* of installation is already installed,
+    # taking it's creation arguments into account.
+    testpmd.install()
 
     # init and enable hugepages (required by dpdk)
     hugepages = node.tools[Hugepages]
@@ -1027,3 +1083,6 @@ def create_l3fwd_rules_files(
     forwarder.tools[Echo].write_to_file(
         "\n".join(sample_rules_v6), rules_v6, append=True
     )
+
+
+DPDK_VERSION_TO_RDMA_CORE_MAP = {"20.11": "46.1", "21.11": ""}


@@ -0,0 +1,194 @@
from assertpy import assert_that
from semver import VersionInfo

from lisa.operating_system import Debian, Fedora, Suse
from lisa.tools import Make, Pkgconfig
from microsoft.testsuites.dpdk.common import (
    DependencyInstaller,
    Installer,
    OsPackageDependencies,
    PackageManagerInstall,
    get_debian_backport_repo_args,
    unsupported_os_thrower,
)

RDMA_CORE_MANA_DEFAULT_SOURCE = (
    "https://github.com/linux-rdma/rdma-core/"
    "releases/download/v50.1/rdma-core-50.1.tar.gz"
)

RDMA_CORE_SOURCE_DEPENDENCIES = DependencyInstaller(
    [
        OsPackageDependencies(
            matcher=lambda x: isinstance(x, Debian)
            # install linux-modules-extra-azure if it's available for mana_ib
            # older debian kernels won't have mana_ib packaged,
            # so skip the check on those kernels.
            and bool(x.get_kernel_information().version >= "5.15.0")
            and x.is_package_in_repo("linux-modules-extra-azure"),
            packages=["linux-modules-extra-azure"],
        ),
        OsPackageDependencies(
            matcher=lambda x: isinstance(x, Debian),
            packages=[
                "cmake",
                "libudev-dev",
                "libnl-3-dev",
                "libnl-route-3-dev",
                "ninja-build",
                "pkg-config",
                "valgrind",
                "python3-dev",
                "cython3",
                "python3-docutils",
                "pandoc",
                "libssl-dev",
                "libelf-dev",
                "python3-pip",
                "libnuma-dev",
            ],
            stop_on_match=True,
        ),
        OsPackageDependencies(
            matcher=lambda x: isinstance(x, Fedora),
            packages=[
                "cmake",
                "libudev-devel",
                "libnl3-devel",
                "pkg-config",
                "valgrind",
                "python3-devel",
                "openssl-devel",
                "unzip",
                "elfutils-devel",
                "python3-pip",
                "tar",
                "wget",
                "dos2unix",
                "psmisc",
                "kernel-devel-$(uname -r)",
                "librdmacm-devel",
                "libmnl-devel",
                "kernel-modules-extra",
                "numactl-devel",
                "kernel-headers",
                "elfutils-libelf-devel",
                "libbpf-devel",
            ],
            stop_on_match=True,
        ),
        # FIXME: SUSE rdma-core build packages not implemented
        # for source builds.
        OsPackageDependencies(matcher=unsupported_os_thrower),
    ]
)

RDMA_CORE_PACKAGE_MANAGER_DEPENDENCIES = DependencyInstaller(
    [
        OsPackageDependencies(
            matcher=lambda x: isinstance(x, Debian)
            # install linux-modules-extra-azure if it's available for mana_ib
            # older debian kernels won't have mana_ib packaged,
            # so skip the check on those kernels.
            and bool(x.get_kernel_information().version >= "5.15.0")
            and x.is_package_in_repo("linux-modules-extra-azure"),
            packages=["linux-modules-extra-azure"],
        ),
        OsPackageDependencies(
            matcher=lambda x: isinstance(x, Debian),
            packages=["ibverbs-providers", "libibverbs-dev"],
        ),
        OsPackageDependencies(
            matcher=lambda x: isinstance(x, Suse),
            packages=["rdma-core-devel", "librdmacm1"],
        ),
        OsPackageDependencies(
            matcher=lambda x: isinstance(x, Fedora),
            packages=["librdmacm-devel"],
        ),
        OsPackageDependencies(
            matcher=lambda x: isinstance(x, (Fedora, Debian, Suse)),
            packages=["rdma-core"],
            stop_on_match=True,
        ),
        OsPackageDependencies(matcher=unsupported_os_thrower),
    ]
)


class RdmaCorePackageManagerInstall(PackageManagerInstall):
    def _setup_node(self) -> None:
        if isinstance(self._os, Fedora):
            self._os.install_epel()
        if isinstance(self._os, Debian):
            self._package_manager_extra_args = get_debian_backport_repo_args(self._os)
        super()._setup_node()

    def get_installed_version(self) -> VersionInfo:
        return self._os.get_package_information("rdma-core", use_cached=False)

    def _check_if_installed(self) -> bool:
        return self._os.package_exists("rdma-core")


# implement SourceInstall for DPDK
class RdmaCoreSourceInstaller(Installer):
    def _check_if_installed(self) -> bool:
        try:
            package_manager_install = self._os.package_exists("rdma-core")
            # _get_installed_version for source install throws
            # if package is not found. So we don't need the result,
            # if the function doesn't throw, the version was found.
            _ = self.get_installed_version()
            # this becomes '(not package manager installed) and
            # _get_installed_version() doesn't throw'
            return not package_manager_install
        except AssertionError:
            # _get_installed_version threw an AssertionError
            # so PkgConfig info was not found
            return False

    def _setup_node(self) -> None:
        if isinstance(self._os, (Debian, Fedora, Suse)):
            self._os.uninstall_packages("rdma-core")
        if isinstance(self._os, Fedora):
            self._os.group_install_packages("Development Tools")
        super()._setup_node()

    def _uninstall(self) -> None:
        # undo source installation (thanks ninja)
        if not self._check_if_installed():
            return
        self._node.tools[Make].run(
            parameters="uninstall", shell=True, sudo=True, cwd=self.asset_path
        )
        working_path = str(self._node.get_working_path())
        assert_that(str(self.asset_path)).described_as(
            "RDMA Installer source path was empty during attempted cleanup!"
        ).is_not_empty()
        assert_that(str(self.asset_path)).described_as(
            "RDMA Installer source path was set to root dir "
            "'/' during attempted cleanup!"
        ).is_not_equal_to("/")
        assert_that(str(self.asset_path)).described_as(
            f"RDMA Installer source path {self.asset_path} was set to "
            f"working path '{working_path}' during attempted cleanup!"
        ).is_not_equal_to(working_path)
        # remove source code directory
        self._node.execute(f"rm -rf {str(self.asset_path)}", shell=True)

    def get_installed_version(self) -> VersionInfo:
        return self._node.tools[Pkgconfig].get_package_version(
            "libibverbs", update_cached=True
        )

    def _install(self) -> None:
        super()._install()
        node = self._node
        make = node.tools[Make]
        node.execute(
            "cmake -DIN_PLACE=0 -DNO_MAN_PAGES=1 -DCMAKE_INSTALL_PREFIX=/usr",
            shell=True,
            cwd=self.asset_path,
            sudo=True,
        )
        make.make_install(self.asset_path)
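
Note on the new rdma-core installers: the source path runs roughly the sequence below when do_installation() is invoked. This is a simplified usage sketch of the order implied by the Installer base class and the overrides above, not a verbatim trace; "node" is assumed to be a lisa Node:

# simplified usage sketch
from microsoft.testsuites.dpdk.common import TarDownloader
from microsoft.testsuites.dpdk.rdmacore import (
    RDMA_CORE_MANA_DEFAULT_SOURCE,
    RDMA_CORE_SOURCE_DEPENDENCIES,
    RdmaCoreSourceInstaller,
)

installer = RdmaCoreSourceInstaller(
    node,
    os_dependencies=RDMA_CORE_SOURCE_DEPENDENCIES,
    downloader=TarDownloader(node, RDMA_CORE_MANA_DEFAULT_SOURCE),
)
installer.do_installation()
# 1. _setup_node(): remove any distro rdma-core, then the base class downloads
#    the tarball so asset_path is set
# 2. the matching OS package dependencies are installed in one round trip
# 3. _install(): cmake -DIN_PLACE=0 ... in asset_path, then make install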