Add `Optional` Environment (#32332)
* updates to eng/ci_tools and eng/test_tools.txt to allow necessary additions for compatibility
* added new namespace to azure-sdk-tools: ci_tools.scenario
* ci_tools.scenario is used to emplace any code that actually does the action of installing a dependency, replacing requirements, building on the fly, etc. This means that create_package_and_install and replace_dev_reqs are both moved into this module
* created tox environment 'optional'. This tox environment utilizes the array items of pyproject.toml config item 'tool.azure-sdk-build.optional' to install requirements for specific test scenarios before invoking tests against them. These tests are run within a custom venv for each optional configuration item in pyproject.toml, located at .tox/optional/<envname>/
* the new 'optional' environment is enhanced to allow users to pass in the target environment, e.g.: 'tox -e optional --root . -c ../../../eng/tox/tox.ini -- --optional no_requests'

Co-authored-by: Laurent Mazuel <laurent.mazuel@gmail.com>
Co-authored-by: McCoy Patiño <39780829+mccoyp@users.noreply.github.com>
This commit is contained in:
Parent: c31596f13a
Commit: da947a9fce
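For orientation, here is a minimal sketch (an illustration, not repo code) of how the configuration shape this change consumes could be read. It assumes `tomli`, which the repo already pins elsewhere in `eng/tox/tox.ini`; the real lookup goes through `get_config_setting` in azure-sdk-tools.

# Hypothetical standalone reader mirroring what get_config_setting("optional")
# returns for a package; every [[tool.azure-sdk-build.optional]] entry carries
# name, install, uninstall, and additional_pytest_args.
import tomli

def read_optional_configs(pyproject_path: str) -> list:
    with open(pyproject_path, "rb") as f:  # tomli requires binary mode
        data = tomli.load(f)
    return data.get("tool", {}).get("azure-sdk-build", {}).get("optional", [])

for config in read_optional_configs("sdk/core/azure-core/pyproject.toml"):
    print(config["name"], "uninstalls:", config.get("uninstall", []))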
@ -59,6 +59,7 @@ conda/assembly/
conda/assembled/
conda/downloaded/
conda/conda-env/
scenario_*.txt

# tox environment folders
.tox/
@ -127,6 +127,7 @@
    "conda/conda-releaselogs/azure-mgmt.md"
],
"words": [
    "spinup",
    "cibuildwheel",
    "aoai",
    "pyprojects",
@ -11,7 +11,7 @@ import argparse
import os
import logging
from ci_tools.functions import build_and_install_dev_reqs
from ci_tools.scenario.generation import build_and_install_dev_reqs

logging.getLogger().setLevel(logging.INFO)
@ -9,83 +9,12 @@
# it should be executed from tox with `{toxenvdir}/python` to ensure that the package
# can be successfully tested from within a tox environment.

from subprocess import check_call, CalledProcessError
import argparse
import os
import logging
import sys
import glob
import shutil

from tox_helper_tasks import get_pip_list_output
from ci_tools.parsing import ParsedSetup, parse_require
from ci_tools.build import create_package
from ci_tools.functions import get_package_from_repo, find_whl, find_sdist, discover_prebuilt_package

logging.getLogger().setLevel(logging.INFO)

from ci_tools.parsing import ParsedSetup


def cleanup_build_artifacts(build_folder):
    # clean up egginfo
    results = glob.glob(os.path.join(build_folder, "*.egg-info"))

    if results:
        print(results[0])
        shutil.rmtree(results[0])

    # clean up build results
    build_path = os.path.join(build_folder, "build")
    if os.path.exists(build_path):
        shutil.rmtree(build_path)


def discover_packages(setuppy_path, args):
    packages = []
    if os.getenv("PREBUILT_WHEEL_DIR") is not None and not args.force_create:
        packages = discover_prebuilt_package(os.getenv("PREBUILT_WHEEL_DIR"), setuppy_path, args.package_type)
        pkg = ParsedSetup.from_path(setuppy_path)

        if not packages:
            logging.error(
                "Package is missing in prebuilt directory {0} for package {1} and version {2}".format(
                    os.getenv("PREBUILT_WHEEL_DIR"), pkg.name, pkg.version
                )
            )
            exit(1)
    else:
        packages = build_and_discover_package(
            setuppy_path,
            args.distribution_directory,
            args.target_setup,
            args.package_type,
        )

    return packages


def in_ci():
    return os.getenv("TF_BUILD", False)


def build_and_discover_package(setuppy_path, dist_dir, target_setup, package_type):
    if package_type == "wheel":
        create_package(setuppy_path, dist_dir, enable_sdist=False)
    else:
        create_package(setuppy_path, dist_dir, enable_wheel=False)

    prebuilt_packages = [
        f
        for f in os.listdir(args.distribution_directory)
        if f.endswith(".whl" if package_type == "wheel" else ".tar.gz")
    ]

    if not in_ci():
        logging.info("Cleaning up build directories and files")
        cleanup_build_artifacts(target_setup)
    return prebuilt_packages

from ci_tools.scenario.generation import create_package_and_install


if __name__ == "__main__":
    parser = argparse.ArgumentParser(
@ -119,6 +48,7 @@ if __name__ == "__main__":
        "--cache-dir",
        dest="cache_dir",
        help="Location that, if present, will be used as the pip cache directory.",
        default=None
    )

    parser.add_argument(

@ -126,12 +56,14 @@ if __name__ == "__main__":
        "--work-dir",
        dest="work_dir",
        help="Location that, if present, will be used as working directory to run pip install.",
        default=None
    )

    parser.add_argument(
        "--force-create",
        dest="force_create",
        help="Force recreate whl even if it is prebuilt",
        default=False
    )

    parser.add_argument(
@ -150,117 +82,16 @@ if __name__ == "__main__":

    args = parser.parse_args()

    commands_options = []
    built_pkg_path = ""
    setup_py_path = os.path.join(args.target_setup, "setup.py")
    additional_downloaded_reqs = []
    create_package_and_install(
        distribution_directory=args.distribution_directory,
        target_setup=args.target_setup,
        skip_install=args.skip_install,
        cache_dir=args.cache_dir,
        work_dir=args.work_dir,
        force_create=args.force_create,
        package_type=args.package_type,
        pre_download_disabled=args.pre_download_disabled,
    )

    if not os.path.exists(args.distribution_directory):
        os.mkdir(args.distribution_directory)

    tmp_dl_folder = os.path.join(args.distribution_directory, "dl")
    if not os.path.exists(tmp_dl_folder):
        os.mkdir(tmp_dl_folder)

    # preview version is enabled when installing dev build so pip will install dev build version from devops feed
    if os.getenv("SetDevVersion", "false") == "true":
        commands_options.append("--pre")

    if args.cache_dir:
        commands_options.extend(["--cache-dir", args.cache_dir])

    discovered_packages = discover_packages(setup_py_path, args)

    if args.skip_install:
        logging.info("Flag to skip install whl is passed. Skipping package installation")
    else:
        for built_package in discovered_packages:
            if os.getenv("PREBUILT_WHEEL_DIR") is not None and not args.force_create:
                # find the prebuilt package in the set of prebuilt wheels
                package_path = os.path.join(os.environ["PREBUILT_WHEEL_DIR"], built_package)
                if os.path.isfile(package_path):
                    built_pkg_path = package_path
                    logging.info("Installing {w} from directory".format(w=built_package))
                # it doesn't exist, so we need to error out
                else:
                    logging.error("{w} not present in the prebuilt package directory. Exiting.".format(w=built_package))
                    exit(1)
            else:
                built_pkg_path = os.path.abspath(os.path.join(args.distribution_directory, built_package))
                logging.info("Installing {w} from fresh built package.".format(w=built_package))

            if not args.pre_download_disabled:
                requirements = ParsedSetup.from_path(
                    os.path.join(os.path.abspath(args.target_setup), "setup.py")
                ).requires
                azure_requirements = [req.split(";")[0] for req in requirements if req.startswith("azure-")]

                if azure_requirements:
                    logging.info(
                        "Found {} azure requirement(s): {}".format(len(azure_requirements), azure_requirements)
                    )

                    download_command = [
                        sys.executable,
                        "-m",
                        "pip",
                        "download",
                        "-d",
                        tmp_dl_folder,
                        "--no-deps",
                    ]

                    installation_additions = []

                    # only download a package if the requirement is not already met, so walk across
                    # direct install_requires
                    for req in azure_requirements:
                        addition_necessary = True
                        # get all installed packages
                        installed_pkgs = get_pip_list_output()

                        # parse the specifier
                        req_name, req_specifier = parse_require(req)

                        # if we have the package already present...
                        if req_name in installed_pkgs:
                            # if there is no specifier for the requirement, we can ignore it
                            if req_specifier is None:
                                addition_necessary = False

                            # ...do we need to install the new version? if the existing specifier matches, we're fine
                            if req_specifier is not None and installed_pkgs[req_name] in req_specifier:
                                addition_necessary = False

                        if addition_necessary:
                            # we only want to add an additional req for download if it actually exists
                            # in the upstream feed (either dev or pypi)
                            # if it doesn't, we should just install the relative dep if it's an azure package
                            installation_additions.append(req)

                    if installation_additions:
                        non_present_reqs = []
                        for addition in installation_additions:
                            try:
                                check_call(
                                    download_command + [addition] + commands_options,
                                    env=dict(os.environ, PIP_EXTRA_INDEX_URL=""),
                                )
                            except CalledProcessError as e:
                                req_name, req_specifier = parse_require(addition)
                                non_present_reqs.append(req_name)

                        additional_downloaded_reqs = [
                            os.path.abspath(os.path.join(tmp_dl_folder, pth)) for pth in os.listdir(tmp_dl_folder)
                        ] + [get_package_from_repo(relative_req).folder for relative_req in non_present_reqs]

            commands = [sys.executable, "-m", "pip", "install", built_pkg_path]
            commands.extend(additional_downloaded_reqs)
            commands.extend(commands_options)

            if args.work_dir and os.path.exists(args.work_dir):
                logging.info("Executing command from {0}:{1}".format(args.work_dir, commands))
                check_call(commands, cwd=args.work_dir)
            else:
                check_call(commands)
            logging.info("Installed {w}".format(w=built_package))
@ -0,0 +1,28 @@
import argparse
from ci_tools.scenario.generation import prepare_and_test_optional


if __name__ == "__main__":
    parser = argparse.ArgumentParser(
        description="""This entrypoint provides automatic invocation of the 'optional' requirements for a given package. View the pyproject.toml within the targeted package folder to see configuration.""",
    )

    parser.add_argument("-t", "--target", dest="target", help="The target package path", required=True)

    parser.add_argument(
        "-o",
        "--optional",
        dest="optional",
        help="The target environment. If not provided, all optional environments will be run.",
        required=False,
    )

    parser.add_argument(
        "--temp",
        dest="temp_dir",
        help="The temp directory this script will work in.",
        required=False,
    )

    args, _ = parser.parse_known_args()
    exit(prepare_and_test_optional(mapped_args=args))
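As a usage sketch (the package path below is illustrative), this entrypoint can also be invoked directly rather than through tox:

python eng/tox/run_optional.py -t sdk/core/azure-core --optional no_aiohttp --temp /tmp/optional-scratch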
@ -539,6 +539,7 @@ deps=
commands=
    black --config {repository_root}/eng/black-pyproject.toml {posargs}


[testenv:generate]
description=Regenerate the code
skip_install=true

@ -547,3 +548,16 @@ deps =
    tomli==2.0.1
commands=
    python -m packaging_tools.generate_client


[testenv:optional]
skipsdist = true
skip_install = true
usedevelop = true
changedir = {toxinidir}
setenv =
    {[testenv]setenv}
    PROXY_URL=http://localhost:5018
commands =
    {envbindir}/python -m pip install {toxinidir}/../../../tools/azure-sdk-tools[build]
    python {repository_root}/eng/tox/run_optional.py -t {toxinidir} --temp={envtmpdir} {posargs}
@ -19,31 +19,6 @@ import re

logging.getLogger().setLevel(logging.INFO)


def get_pip_list_output():
    """Uses the invoking python executable to get the output from pip list."""
    out = subprocess.Popen(
        [sys.executable, "-m", "pip", "list", "--disable-pip-version-check", "--format", "freeze"],
        stdout=subprocess.PIPE,
        stderr=subprocess.STDOUT,
    )

    stdout, stderr = out.communicate()

    collected_output = {}

    if stdout and (stderr is None):
        # this should be compatible with py27 https://docs.python.org/2.7/library/stdtypes.html#str.decode
        for line in stdout.decode("utf-8").split(os.linesep)[2:]:
            if line:
                package, version = re.split("==", line)
                collected_output[package] = version
    else:
        raise Exception(stderr)

    return collected_output


def unzip_sdist_to_directory(containing_folder: str) -> str:
    zips = glob.glob(os.path.join(containing_folder, "*.zip"))
@ -18,8 +18,9 @@ from common_tasks import (
from ci_tools.variables import in_ci
from ci_tools.environment_exclusions import filter_tox_environment_string
from ci_tools.ci_interactions import output_ci_warning
from ci_tools.functions import build_whl_for_req, cleanup_directory, replace_dev_reqs
from pkg_resources import parse_requirements
from ci_tools.scenario.generation import replace_dev_reqs
from ci_tools.functions import cleanup_directory
from pkg_resources import parse_requirements, RequirementParseError
import logging

logging.getLogger().setLevel(logging.INFO)
@ -7,4 +7,16 @@ strict_sphinx = true
# For test environments or static checks where a check should be run by default, not explicitly disabling will enable the check.
# pylint is enabled by default, so there is no reason for a pylint = true in every pyproject.toml.
#
# For newly added checks that are not enabled by default, packages should opt IN by "<check> = true".
# For newly added checks that are not enabled by default, packages should opt IN by "<check> = true".

[[tool.azure-sdk-build.optional]]
name = "no_requests"
install = []
uninstall = ["requests"]
additional_pytest_args = ["--ignore-glob='*_async.py'"]

[[tool.azure-sdk-build.optional]]
name = "no_aiohttp"
install = []
uninstall = ["aiohttp"]
additional_pytest_args = ["-k", "_async.py"]
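For illustration only (derived from the pytest_args assembled in ci_tools.scenario.generation further below, not from captured output), the no_aiohttp entry above ends up driving a scratch-venv invocation shaped roughly like:

<venv-python> -m pytest -rsfE --junitxml=<target>/test-junit-optional-no_aiohttp.xml --verbose --durations=10 --ignore=azure --ignore=.tox --ignore=build --ignore=.eggs <target> -k _async.py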
@ -1,4 +1,4 @@
import argparse, sys, os, logging, pdb
import argparse, sys, os, logging, glob, shutil

from subprocess import run
@ -126,6 +126,19 @@ def build() -> None:
    )


def cleanup_build_artifacts(build_folder):
    # clean up egginfo
    results = glob.glob(os.path.join(build_folder, "*.egg-info"))

    if results:
        shutil.rmtree(results[0])

    # clean up build results
    build_path = os.path.join(build_folder, "build")
    if os.path.exists(build_path):
        shutil.rmtree(build_path)


def build_packages(
    targeted_packages: List[str],
    distribution_directory: str = None,
@ -8,6 +8,7 @@ def output_ci_warning(message: str, location=None) -> None:
    if ci_type == 1:
        if not location:
            import inspect

            source = inspect.stack()[1].filename
        else:
            source = location

@ -17,7 +18,7 @@ def output_ci_warning(message: str, location=None) -> None:
        pass
    else:
        print("Unrecognized CI format, not outputting warning.")


def set_ci_variable(name: str, value: str) -> None:
    """

@ -29,9 +30,9 @@ def set_ci_variable(name: str, value: str) -> None:
    if ci_type == 1:
        print(f"##vso[task.setvariable variable={name}]{value}")
    elif ci_type == 2:
        env_file = os.getenv('GITHUB_ENV')
        env_file = os.getenv("GITHUB_ENV")

        with open(env_file, "a") as env_file:
            env_file.write(f"{name}={value}")
    else:
        print(f"Unrecognized CI format, not setting variable \"{name}.\"")
        print(f'Unrecognized CI format, not setting variable "{name}."')
@ -41,7 +41,7 @@ def is_check_enabled(package_path: str, check: str, default: Any = True) -> bool

    In order:
    - Checks <CHECK>_OPT_OUT for package name.
    - Honors override variable if one is present: <PACKAGE-NAME>_<CHECK>.
    - Honors override variable if one is present: <PACKAGE_NAME>_<CHECK>. (Note the _ in the package name, `-` is not a valid env variable character.)
    - Finally falls back to the pyproject.toml at package root (if one exists) for a tools setting enabling/disabling <check>.
    """
    if package_path.endswith("setup.py"):
@ -364,108 +364,6 @@ def process_requires(setup_py_path: str):
    logging.info("Package requirement is updated in setup.py")


def build_and_install_dev_reqs(file: str, pkg_root: str) -> None:
    """This function builds whls for every requirement found in a package's
    dev_requirements.txt and installs it.

    :param str file: the absolute path to the dev_requirements.txt file
    :param str pkg_root: the absolute path to the package's root
    :return: None
    """
    adjusted_req_lines = []

    with open(file, "r") as f:
        for line in f:
            args = [part.strip() for part in line.split() if part and not part.strip() == "-e"]
            amended_line = " ".join(args)

            if amended_line.endswith("]"):
                trim_amount = amended_line[::-1].index("[") + 1
                amended_line = amended_line[0 : (len(amended_line) - trim_amount)]

            adjusted_req_lines.append(amended_line)

    adjusted_req_lines = list(map(lambda x: build_whl_for_req(x, pkg_root), adjusted_req_lines))
    install_deps_commands = [
        sys.executable,
        "-m",
        "pip",
        "install",
    ]
    logging.info(f"Installing dev requirements from freshly built packages: {adjusted_req_lines}")
    install_deps_commands.extend(adjusted_req_lines)
    subprocess.check_call(install_deps_commands)
    shutil.rmtree(os.path.join(pkg_root, ".tmp_whl_dir"))


def replace_dev_reqs(file, pkg_root):
    adjusted_req_lines = []

    with open(file, "r") as f:
        original_req_lines = list(line.strip() for line in f)

    for line in original_req_lines:
        args = [part.strip() for part in line.split() if part and not part.strip() == "-e"]
        amended_line = " ".join(args)
        extras = ""

        if amended_line.endswith("]"):
            amended_line, extras = amended_line.rsplit("[", maxsplit=1)
            if extras:
                extras = f"[{extras}"

        adjusted_req_lines.append(f"{build_whl_for_req(amended_line, pkg_root)}{extras}")

    req_file_name = os.path.basename(file)
    logging.info("Old {0}:{1}".format(req_file_name, original_req_lines))
    logging.info("New {0}:{1}".format(req_file_name, adjusted_req_lines))

    with open(file, "w") as f:
        # note that we directly use '\n' here instead of os.linesep due to how f.write() actually handles this stuff internally
        # If a file is opened in text mode (the default), during write python will accidentally double replace due to "\r" being
        # replaced with "\r\n" on Windows. Result: "\r\n\n". Extra line breaks!
        f.write("\n".join(adjusted_req_lines))


def is_relative_install_path(req: str, package_path: str) -> str:
    possible_setup_path = os.path.join(package_path, req, "setup.py")

    # blank lines are _allowed_ in a dev requirements. they should not resolve to the package_path erroneously
    if not req:
        return False

    return os.path.exists(possible_setup_path)


def build_whl_for_req(req: str, package_path: str) -> str:
    """Builds a whl from the dev_requirements file.

    :param str req: a requirement from the dev_requirements.txt
    :param str package_path: the absolute path to the package's root
    :return: The absolute path to the whl built or the requirement if a third-party package
    """
    from ci_tools.build import create_package

    if is_relative_install_path(req, package_path):
        # Create temp path if it doesn't exist
        temp_dir = os.path.join(package_path, ".tmp_whl_dir")
        if not os.path.exists(temp_dir):
            os.mkdir(temp_dir)

        req_pkg_path = os.path.abspath(os.path.join(package_path, req.replace("\n", "")))
        parsed = ParsedSetup.from_path(req_pkg_path)

        logging.info("Building wheel for package {}".format(parsed.name))
        create_package(req_pkg_path, temp_dir, enable_sdist=False)

        whl_path = os.path.join(temp_dir, find_whl(temp_dir, parsed.name, parsed.version))
        logging.info("Wheel for package {0} is {1}".format(parsed.name, whl_path))
        logging.info("Replacing dev requirement. Old requirement:{0}, New requirement:{1}".format(req, whl_path))
        return whl_path
    else:
        return req


def find_sdist(dist_dir: str, pkg_name: str, pkg_version: str) -> str:
    """This function attempts to look within a directory (and all subdirs therein) and find a source distribution for the targeted package and version."""
    # This function will find a sdist for given package name
@ -492,6 +390,100 @@ def find_sdist(dist_dir: str, pkg_name: str, pkg_version: str) -> str:
    return packages[0]


def pip_install(requirements: List[str], include_dependencies: bool = True, python_executable: str = None) -> bool:
    """
    Attempts to invoke an install operation using the invoking python's pip. Empty requirements are auto-success.
    """

    exe = python_executable or sys.executable

    command = [exe, "-m", "pip", "install"]

    if requirements:
        command.extend([req.strip() for req in requirements])
    else:
        return True

    try:
        subprocess.check_call(command)
    except subprocess.CalledProcessError as f:
        return False

    return True


def pip_uninstall(requirements: List[str], python_executable: str) -> bool:
    """
    Attempts to invoke an uninstall operation using the invoking python's pip. Empty requirements are auto-success.
    """
    exe = python_executable or sys.executable
    command = [exe, "-m", "pip", "uninstall", "-y"]

    if requirements:
        command.extend([req.strip() for req in requirements])
    else:
        return True

    try:
        result = subprocess.check_call(command)
        return True
    except subprocess.CalledProcessError as f:
        return False


def pip_install_requirements_file(requirements_file: str, python_executable: str = None) -> bool:
    return pip_install(["-r", requirements_file], True, python_executable)


def get_pip_list_output(python_executable: str = None):
    """Uses the invoking python executable to get the output from pip list."""
    exe = python_executable or sys.executable

    out = subprocess.Popen(
        [exe, "-m", "pip", "list", "--disable-pip-version-check", "--format", "freeze"],
        stdout=subprocess.PIPE,
        stderr=subprocess.STDOUT,
    )

    stdout, stderr = out.communicate()

    collected_output = {}

    if stdout and (stderr is None):
        # this should be compatible with py27 https://docs.python.org/2.7/library/stdtypes.html#str.decode
        for line in stdout.decode("utf-8").split(os.linesep)[2:]:
            if line:
                package, version = re.split("==", line)
                collected_output[package] = version
    else:
        raise Exception(stderr)

    return collected_output


def pytest(args: [], cwd: str = None, python_executable: str = None) -> bool:
    """
    Invokes a set of tests, returns true if successful, false otherwise.
    """

    exe = python_executable or sys.executable

    commands = [
        exe,
        "-m",
        "pytest",
    ]

    commands.extend(args)

    logging.info(commands)
    if cwd:
        result = subprocess.run(commands, cwd=cwd)
    else:
        result = subprocess.run(commands)

    return result.returncode == 0


def get_interpreter_compatible_tags() -> List[str]:
    """
    This function invokes pip from the invoking interpreter and discovers which tags the interpreter is compatible with.
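A brief usage sketch of the new helpers above (the interpreter path is an assumption; any python executable works, defaulting to sys.executable when omitted):

from ci_tools.functions import pip_install, pip_uninstall, get_pip_list_output, pytest

exe = "/tmp/scratch-venv/bin/python"  # hypothetical venv interpreter
if pip_install(["requests"], python_executable=exe):
    print(get_pip_list_output(exe).get("requests"))  # name -> version mapping
pip_uninstall(["requests"], exe)
passed = pytest(["-k", "not slow"], cwd="sdk/core/azure-core", python_executable=exe)  # True when pytest exits 0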
@ -5,6 +5,7 @@ from ci_tools.parsing import ParsedSetup
from ci_tools.variables import in_ci
from ci_tools.environment_exclusions import is_check_enabled


def entrypoint() -> None:
    parser = argparse.ArgumentParser(
        description="""This utility checks the keywords of a targeted python package. If the keyword 'azure sdk' is not present, error.""",

@ -23,14 +24,13 @@ def entrypoint() -> None:

    if in_ci():
        if not is_check_enabled(args.target, "verify_keywords"):
            logging.info(
                f"Package {pkg_details.name} opts-out of keyword verification check."
            )
            logging.info(f"Package {pkg_details.name} opts-out of keyword verification check.")
            exit(0)

    if "azure sdk" not in pkg_details.keywords:
        print(f"Keyword 'azure sdk' not present in keywords for {pkg_details.name}. Before attempting publishing, ensure that package {pkg_details.name} has keyword 'azure sdk' present in the keyword array.")
        print(
            f"Keyword 'azure sdk' not present in keywords for {pkg_details.name}. Before attempting publishing, ensure that package {pkg_details.name} has keyword 'azure sdk' present in the keyword array."
        )
        exit(1)
    else:
        exit(0)
@ -1,9 +1,9 @@
from .parse_functions import (
    parse_setup,
    parse_require,
    parse_requirements_file,
    get_name_from_specifier,
    ParsedSetup,
    parse_freeze_output,
    read_setup_py_content,
    get_build_config,
    get_config_setting,

@ -13,9 +13,9 @@ from .parse_functions import (
__all__ = [
    "parse_setup",
    "parse_require",
    "parse_requirements_file",
    "get_name_from_specifier",
    "ParsedSetup",
    "parse_freeze_output",
    "read_setup_py_content",
    "get_build_config",
    "get_config_setting",
@ -313,7 +313,7 @@ def parse_require(req: str) -> Tuple[str, SpecifierSet]:
    return (pkg_name, spec)


def parse_requirements_file(file_location: str) -> Dict[str, str]:
def parse_freeze_output(file_location: str) -> Dict[str, str]:
    """
    Takes a python requirements file and returns a dictionary representing the contents.
    """
@ -0,0 +1,4 @@
from .generation import prepare_and_test_optional
from .managed_virtual_env import ManagedVirtualEnv

__all__ = ["prepare_and_test_optional", "ManagedVirtualEnv"]
@ -0,0 +1,417 @@
import argparse
import os
import sys
import subprocess
import shutil
import logging

from ci_tools.environment_exclusions import is_check_enabled
from ci_tools.variables import in_ci
from ci_tools.parsing import ParsedSetup
from ci_tools.functions import (
    get_config_setting,
    discover_prebuilt_package,
    pip_install,
    pip_uninstall,
)
from ci_tools.build import cleanup_build_artifacts, create_package
from ci_tools.parsing import ParsedSetup, parse_require
from ci_tools.functions import get_package_from_repo, find_whl, get_pip_list_output, pytest
from .managed_virtual_env import ManagedVirtualEnv


def prepare_environment(package_folder: str, venv_directory: str, env_name: str) -> str:
    """
    Empties the venv_directory directory and creates a virtual environment within. Returns the path to the new python executable.
    """
    venv = ManagedVirtualEnv(venv_directory, env_name)
    venv.create()

    return venv.python_executable


def create_package_and_install(
    distribution_directory: str,
    target_setup: str,
    skip_install: bool,
    cache_dir: str,
    work_dir: str,
    force_create: bool,
    package_type: str,
    pre_download_disabled: bool,
    python_executable: str = None,
) -> None:
    """
    Workhorse for singular package installation given a package and a possible prebuilt wheel directory. Handles installation of both package AND dependencies, handling compatibility
    issues where possible.
    """

    python_exe = python_executable or sys.executable

    commands_options = []
    built_pkg_path = ""
    setup_py_path = os.path.join(target_setup, "setup.py")
    additional_downloaded_reqs = []

    if not os.path.exists(distribution_directory):
        os.mkdir(distribution_directory)

    tmp_dl_folder = os.path.join(distribution_directory, "dl")
    if not os.path.exists(tmp_dl_folder):
        os.mkdir(tmp_dl_folder)

    # preview version is enabled when installing dev build so pip will install dev build version from devops feed
    if os.getenv("SetDevVersion", "false") == "true":
        commands_options.append("--pre")

    if cache_dir:
        commands_options.extend(["--cache-dir", cache_dir])

    discovered_packages = discover_packages(
        setup_py_path, distribution_directory, target_setup, package_type, force_create
    )

    if skip_install:
        logging.info("Flag to skip install whl is passed. Skipping package installation")
    else:
        for built_package in discovered_packages:
            if os.getenv("PREBUILT_WHEEL_DIR") is not None and not force_create:
                # find the prebuilt package in the set of prebuilt wheels
                package_path = os.path.join(os.environ["PREBUILT_WHEEL_DIR"], built_package)
                if os.path.isfile(package_path):
                    built_pkg_path = package_path
                    logging.info("Installing {w} from directory".format(w=built_package))
                # it doesn't exist, so we need to error out
                else:
                    logging.error("{w} not present in the prebuilt package directory. Exiting.".format(w=built_package))
                    exit(1)
            else:
                built_pkg_path = os.path.abspath(os.path.join(distribution_directory, built_package))
                logging.info("Installing {w} from fresh built package.".format(w=built_package))

            if not pre_download_disabled:
                requirements = ParsedSetup.from_path(os.path.join(os.path.abspath(target_setup), "setup.py")).requires
                azure_requirements = [req.split(";")[0] for req in requirements if req.startswith("azure")]

                if azure_requirements:
                    logging.info(
                        "Found {} azure requirement(s): {}".format(len(azure_requirements), azure_requirements)
                    )

                    download_command = [
                        python_exe,
                        "-m",
                        "pip",
                        "download",
                        "-d",
                        tmp_dl_folder,
                        "--no-deps",
                    ]

                    installation_additions = []

                    # only download a package if the requirement is not already met, so walk across
                    # direct install_requires
                    for req in azure_requirements:
                        addition_necessary = True
                        # get all installed packages
                        installed_pkgs = get_pip_list_output(python_exe)

                        # parse the specifier
                        req_name, req_specifier = parse_require(req)

                        # if we have the package already present...
                        if req_name in installed_pkgs:
                            # if there is no specifier for the requirement, we can ignore it
                            if req_specifier is None:
                                addition_necessary = False

                            # ...do we need to install the new version? if the existing specifier matches, we're fine
                            if req_specifier is not None and installed_pkgs[req_name] in req_specifier:
                                addition_necessary = False

                        if addition_necessary:
                            # we only want to add an additional req for download if it actually exists
                            # in the upstream feed (either dev or pypi)
                            # if it doesn't, we should just install the relative dep if it's an azure package
                            installation_additions.append(req)

                    if installation_additions:
                        non_present_reqs = []
                        for addition in installation_additions:
                            try:
                                subprocess.check_call(
                                    download_command + [addition] + commands_options,
                                    env=dict(os.environ, PIP_EXTRA_INDEX_URL=""),
                                )
                            except subprocess.CalledProcessError as e:
                                req_name, req_specifier = parse_require(addition)
                                non_present_reqs.append(req_name)

                        additional_downloaded_reqs = [
                            os.path.abspath(os.path.join(tmp_dl_folder, pth)) for pth in os.listdir(tmp_dl_folder)
                        ] + [get_package_from_repo(relative_req).folder for relative_req in non_present_reqs]

            commands = [python_exe, "-m", "pip", "install", built_pkg_path]
            commands.extend(additional_downloaded_reqs)
            commands.extend(commands_options)

            if work_dir and os.path.exists(work_dir):
                logging.info("Executing command from {0}:{1}".format(work_dir, commands))
                subprocess.check_call(commands, cwd=work_dir)
            else:
                subprocess.check_call(commands)
            logging.info("Installed {w}".format(w=built_package))


def replace_dev_reqs(file: str, pkg_root: str) -> None:
    """Takes a target requirements file, replaces all local relative install locations with wheels assembled from whatever that target path was.
    This is an extremely important step that runs on every dev_requirements.txt file before invoking any tox runs.

    This is due to the fact that pip isn't multi-process-safe with the activity of installing a local relative requirement. .pyc files are updated
    and removed in place, possibly causing a hang in the install process. When in_ci() is true, this function is run against every single requirement file.

    :param str file: the absolute path to the dev_requirements.txt file
    :param str pkg_root: the absolute path to the package's root
    :return: None
    """
    adjusted_req_lines = []

    with open(file, "r") as f:
        original_req_lines = list(line.strip() for line in f)

    for line in original_req_lines:
        args = [part.strip() for part in line.split() if part and not part.strip() == "-e"]
        amended_line = " ".join(args)
        extras = ""

        if amended_line.endswith("]"):
            amended_line, extras = amended_line.rsplit("[", maxsplit=1)
            if extras:
                extras = f"[{extras}"

        adjusted_req_lines.append(f"{build_whl_for_req(amended_line, pkg_root)}{extras}")

    req_file_name = os.path.basename(file)
    logging.info("Old {0}:{1}".format(req_file_name, original_req_lines))
    logging.info("New {0}:{1}".format(req_file_name, adjusted_req_lines))

    with open(file, "w") as f:
        # note that we directly use '\n' here instead of os.linesep due to how f.write() actually handles this stuff internally
        # If a file is opened in text mode (the default), during write python will accidentally double replace due to "\r" being
        # replaced with "\r\n" on Windows. Result: "\r\n\n". Extra line breaks!
        f.write("\n".join(adjusted_req_lines))


def discover_packages(
    setup_path: str, distribution_directory: str, target_setup: str, package_type: str, force_create: bool
):
    packages = []
    if os.getenv("PREBUILT_WHEEL_DIR") is not None and not force_create:
        packages = discover_prebuilt_package(os.getenv("PREBUILT_WHEEL_DIR"), setup_path, package_type)
        pkg = ParsedSetup.from_path(setup_path)

        if not packages:
            logging.error(
                "Package is missing in prebuilt directory {0} for package {1} and version {2}".format(
                    os.getenv("PREBUILT_WHEEL_DIR"), pkg.name, pkg.version
                )
            )
            exit(1)
    else:
        packages = build_and_discover_package(
            setup_path,
            distribution_directory,
            target_setup,
            package_type,
        )
    return packages


def build_and_install_dev_reqs(file: str, pkg_root: str) -> None:
    """This function builds whls for every requirement found in a package's
    dev_requirements.txt and installs it.

    :param str file: the absolute path to the dev_requirements.txt file
    :param str pkg_root: the absolute path to the package's root
    :return: None
    """
    adjusted_req_lines = []

    with open(file, "r") as f:
        for line in f:
            args = [part.strip() for part in line.split() if part and not part.strip() == "-e"]
            amended_line = " ".join(args)

            if amended_line.endswith("]"):
                trim_amount = amended_line[::-1].index("[") + 1
                amended_line = amended_line[0 : (len(amended_line) - trim_amount)]

            adjusted_req_lines.append(amended_line)

    adjusted_req_lines = list(map(lambda x: build_whl_for_req(x, pkg_root), adjusted_req_lines))
    install_deps_commands = [
        sys.executable,
        "-m",
        "pip",
        "install",
    ]
    logging.info(f"Installing dev requirements from freshly built packages: {adjusted_req_lines}")
    install_deps_commands.extend(adjusted_req_lines)
    subprocess.check_call(install_deps_commands)
    shutil.rmtree(os.path.join(pkg_root, ".tmp_whl_dir"))


def is_relative_install_path(req: str, package_path: str) -> str:
    possible_setup_path = os.path.join(package_path, req, "setup.py")

    # blank lines are _allowed_ in a dev requirements. they should not resolve to the package_path erroneously
    if not req:
        return False

    return os.path.exists(possible_setup_path)


def build_whl_for_req(req: str, package_path: str) -> str:
    """Builds a whl from the dev_requirements file.

    :param str req: a requirement from the dev_requirements.txt
    :param str package_path: the absolute path to the package's root
    :return: The absolute path to the whl built or the requirement if a third-party package
    """
    from ci_tools.build import create_package

    if is_relative_install_path(req, package_path):
        # Create temp path if it doesn't exist
        temp_dir = os.path.join(package_path, ".tmp_whl_dir")
        if not os.path.exists(temp_dir):
            os.mkdir(temp_dir)

        req_pkg_path = os.path.abspath(os.path.join(package_path, req.replace("\n", "")))
        parsed = ParsedSetup.from_path(req_pkg_path)

        logging.info("Building wheel for package {}".format(parsed.name))
        create_package(req_pkg_path, temp_dir, enable_sdist=False)

        whl_path = os.path.join(temp_dir, find_whl(temp_dir, parsed.name, parsed.version))
        logging.info("Wheel for package {0} is {1}".format(parsed.name, whl_path))
        logging.info("Replacing dev requirement. Old requirement:{0}, New requirement:{1}".format(req, whl_path))
        return whl_path
    else:
        return req


def build_and_discover_package(setup_path: str, dist_dir: str, target_setup: str, package_type):
    if package_type == "wheel":
        create_package(setup_path, dist_dir, enable_sdist=False)
    else:
        create_package(setup_path, dist_dir, enable_wheel=False)

    prebuilt_packages = [
        f for f in os.listdir(dist_dir) if f.endswith(".whl" if package_type == "wheel" else ".tar.gz")
    ]

    if not in_ci():
        logging.info("Cleaning up build directories and files")
        cleanup_build_artifacts(target_setup)
    return prebuilt_packages


def prepare_and_test_optional(mapped_args: argparse.Namespace) -> int:
    parsed_package = ParsedSetup.from_path(mapped_args.target)

    if in_ci():
        if not is_check_enabled(mapped_args.target, "optional", False):
            logging.info(f"Package {parsed_package.name} opts-out of optional check.")
            return 0

    optional_configs = get_config_setting(mapped_args.target, "optional")

    if len(optional_configs) == 0:
        logging.info(f"No optional environments detected in pyproject.toml within {mapped_args.target}.")
        return 0

    config_results = []

    for config in optional_configs:
        env_name = config.get("name")

        if mapped_args.optional:
            if env_name != mapped_args.optional:
                logging.info(f"{env_name} does not match targeted environment {mapped_args.optional}, skipping this environment.")
                continue

        environment_exe = prepare_environment(mapped_args.target, mapped_args.temp_dir, env_name)

        # install the package (either building manually or pulling from prebuilt directory)
        create_package_and_install(
            distribution_directory=mapped_args.temp_dir,
            target_setup=mapped_args.target,
            skip_install=False,
            cache_dir=None,  # todo, resolve this for CI builds
            work_dir=mapped_args.temp_dir,
            force_create=False,
            package_type="wheel",
            pre_download_disabled=False,
            python_executable=environment_exe,
        )

        dev_reqs = os.path.join(mapped_args.target, "dev_requirements.txt")
        test_tools = os.path.join(mapped_args.target, "..", "..", "..", "eng", "test_tools.txt")

        # install the dev requirements and test_tools requirements files to ensure tests can run
        install_result = pip_install(["-r", dev_reqs, "-r", test_tools], python_executable=environment_exe)
        if not install_result:
            logging.error(
                f"Unable to complete installation of dev_requirements.txt/ci_tools.txt for {parsed_package.name}, check command output above."
            )
            config_results.append(False)
            break

        # install any packages that are added in the optional config
        additional_installs = config.get("install", [])
        install_result = pip_install(additional_installs, python_executable=environment_exe)
        if not install_result:
            logging.error(
                f"Unable to complete installation of additional packages {additional_installs} for {parsed_package.name}, check command output above."
            )
            config_results.append(False)
            break

        # uninstall any configured packages from the optional config
        additional_uninstalls = config.get("uninstall", [])
        uninstall_result = pip_uninstall(additional_uninstalls, python_executable=environment_exe)
        if not uninstall_result:
            logging.error(
                f"Unable to complete removal of packages targeted for uninstall {additional_uninstalls} for {parsed_package.name}, check command output above."
            )
            config_results.append(False)
            break

        # invoke tests
        pytest_args = [
            "-rsfE",
            f"--junitxml={mapped_args.target}/test-junit-optional-{env_name}.xml",
            "--verbose",
            "--durations=10",
            "--ignore=azure",
            "--ignore=.tox",
            "--ignore=build",
            "--ignore=.eggs",
            mapped_args.target,
        ]
        pytest_args.extend(config.get("additional_pytest_args", []))
        logging.info(f"Invoking tests for package {parsed_package.name} and optional environment {env_name}")
        config_results.append(pytest(pytest_args, python_executable=environment_exe))

    if all(config_results):
        logging.info(f"All optional environment(s) for {parsed_package.name} completed successfully.")
        sys.exit(0)
    else:
        for i, config in enumerate(optional_configs):
            if not config_results[i]:
                config_name = config.get("name")
                logging.error(
                    f"Optional environment {config_name} for {parsed_package.name} completed with non-zero exit-code. Check test results above."
                )
        sys.exit(1)
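A minimal sketch (paths are assumptions) of driving prepare_and_test_optional programmatically; eng/tox/run_optional.py assembles effectively the same namespace from its CLI flags:

import argparse
from ci_tools.scenario.generation import prepare_and_test_optional

args = argparse.Namespace(
    target="sdk/core/azure-core",   # package root containing pyproject.toml
    optional="no_requests",         # None would run every optional config
    temp_dir="/tmp/optional-envs",  # scratch venvs are created under here
)
prepare_and_test_optional(mapped_args=args)  # exits the process via sys.exit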
@ -0,0 +1,28 @@
import os, logging, glob, subprocess, sys
from ci_tools.functions import cleanup_directory


class ManagedVirtualEnv:
    def __init__(self, path: str, name: str):
        self.path = os.path.join(path, name)

        # todo: do we want to accept refresh?
        if os.path.exists(self.path):
            cleanup_directory(self.path)

    def create(self):
        logging.info("Creating virtual environment [{}]".format(self.path))
        subprocess.check_call([sys.executable, "-m", "venv", "ENV_DIR", self.path])
        self.python_executable = self._find_python_executable()

    def clear_venv(self):
        subprocess.check_call([sys.executable, "-m", "venv", "--clear", "ENV_DIR", self.path])

    def _find_python_executable(self):
        paths = glob.glob(os.path.join(self.path, "*", "python")) + glob.glob(
            os.path.join(self.path, "*", "python.exe")
        )
        if not paths:
            logging.error(f"Failed to find path to python executable in virtual env:{self.path}")
            sys.exit(1)
        return paths[0]
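And a hypothetical direct use of ManagedVirtualEnv (directory names assumed), matching how prepare_environment consumes it above:

from ci_tools.scenario.managed_virtual_env import ManagedVirtualEnv

venv = ManagedVirtualEnv("/tmp/optional-envs", "no_requests")
venv.create()  # creates the env and resolves python_executable
print(venv.python_executable)  # e.g. /tmp/optional-envs/no_requests/bin/python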
@ -1,97 +0,0 @@
from pypi_tools.pypi import PyPIClient
from ci_tools.parsing import ParsedSetup

DEV_BUILD_IDENTIFIER = "a"


def update_requires(setup_py_path, requires_dict):
    # This method changes package requirement by overriding the specifier
    contents = []
    with open(setup_py_path, "r") as setup_file:
        contents = setup_file.readlines()

    # find and replace all existing package requirement with new requirement
    for i in range(0, len(contents) - 1):
        keys = [k for k in requires_dict.keys() if k in contents[i]]
        for key in keys:
            contents[i] = contents[i].replace(key, requires_dict[key])

    with open(setup_py_path, "w") as setup_file:
        setup_file.writelines(contents)


def is_required_version_on_pypi(package_name, spec):
    client = PyPIClient()
    try:
        pypi_results = client.get_ordered_versions(package_name)
    except:
        pypi_results = []

    versions = [str(v) for v in pypi_results if str(v) in spec]
    return versions


def get_version(pkg_name):
    # find version for the package from source. This logic should be revisited to find version from devops feed
    glob_path = os.path.join(root_dir, "sdk", "*", pkg_name, "setup.py")
    paths = glob.glob(glob_path)
    if paths:
        setup_py_path = paths[0]
        parsed_setup = ParsedSetup.from_path(setup_py_path)
        # Remove dev build part if version for this package is already updated to dev build
        # When building package with dev build version, version for packages in same service is updated to dev build
        # and other packages will not have dev build number
        # strip dev build number so we can check if package exists in PyPI and replace

        version_obj = Version(parsed_setup.version)
        if version_obj.pre:
            if version_obj.pre[0] == DEV_BUILD_IDENTIFIER:
                version = version_obj.base_version

        return version
    else:
        logging.error("setup.py is not found for package {} to identify current version".format(pkg_name))
        exit(1)


def get_base_version(pkg_name):
    # find version for the package from source. This logic should be revisited to find version from devops feed
    glob_path = os.path.join(root_dir, "sdk", "*", pkg_name, "setup.py")
    paths = glob.glob(glob_path)
    if paths:
        setup_py_path = paths[0]
        parsed_setup = ParsedSetup.from_path(setup_py_path)

        version_obj = Version(parsed_setup.version)
        return version_obj.base_version
    else:
        logging.error("setup.py is not found for package {} to identify current version".format(pkg_name))
        exit(1)


def process_requires(setup_py_path):
    # This method processes package requirements to verify if all required packages are available on PyPI
    # If any azure sdk package is not available on PyPI then the requirement will be updated to refer to the dev version
    requires = [Requirement.parse(r) for r in ParsedSetup.from_path(setup_py_path).requires if r.startswith("azure")]

    # Find package requirements that are not available on PyPI
    requirement_to_update = {}
    for req in requires:
        pkg_name = req.key
        spec = SpecifierSet(str(req).replace(pkg_name, ""))

        if not is_required_version_on_pypi(pkg_name, spec):
            old_req = str(req)
            version = get_version(pkg_name)
            base_version = get_base_version(pkg_name)
            logging.info("Updating version {0} in requirement {1} to dev build version".format(version, old_req))
            new_req = old_req.replace(version, "{}{}".format(base_version, DEV_BUILD_IDENTIFIER))
            logging.info("New requirement for package {0}: {1}".format(pkg_name, new_req))
            requirement_to_update[old_req] = new_req

    if not requirement_to_update:
        logging.info("All required packages are available on PyPI")
    else:
        logging.info("Packages not available on PyPI:{}".format(requirement_to_update))
        update_requires(setup_py_path, requirement_to_update)
        logging.info("Package requirement is updated in setup.py")
@ -18,15 +18,18 @@ not_up_to_date = False
target_snippet_sources = ["samples/*.py", "samples/*/*.py"]
target_md_files = ["README.md"]


def check_snippets() -> Dict:
    return snippets


def check_not_up_to_date() -> bool:
    return not_up_to_date


def get_snippet(file: str) -> None:
    file_obj = Path(file)
    with open(file_obj, 'r', encoding='utf8') as f:
    with open(file_obj, "r", encoding="utf8") as f:
        content = f.read()
    pattern = "# \\[START(?P<name>[A-Z a-z0-9_]+)\\](?P<body>[\\s\\S]+?)# \\[END[A-Z a-z0-9_]+\\]"
    matches = re.findall(pattern, content)

@ -44,7 +47,7 @@ def get_snippet(file: str) -> None:
    #     return await pipeline.run(request)
    # # [END trio]
    # \n
    # On one hand, the spaces in the beginning of the line may vary. e.g. If the snippet
    # On one hand, the spaces in the beginning of the line may vary. e.g. If the snippet
    # is in a class, it may have more spaces than if it is not in a class.
    # On the other hand, we cannot remove all spaces because indents are part of Python syntax.
    # Here is our algorithm:

@ -75,7 +78,7 @@ def get_snippet(file: str) -> None:

def update_snippet(file: str) -> None:
    file_obj = Path(file)
    with open(file_obj, 'r', encoding='utf8') as f:
    with open(file_obj, "r", encoding="utf8") as f:
        content = f.read()
    pattern = r"(?P<content>(?P<header><!-- SNIPPET:(?P<name>[A-Z a-z0-9_.]+)-->)[\n]+```python\n[\s\S]*?\n<!-- END SNIPPET -->)"
    matches = re.findall(pattern, content, flags=re.MULTILINE)

@ -95,7 +98,7 @@ def update_snippet(file: str) -> None:
    global not_up_to_date
    not_up_to_date = True
    content = content.replace(body, target_code)
    with open(file_obj, 'w', encoding='utf8') as f:
    with open(file_obj, "w", encoding="utf8") as f:
        f.write(content)


@ -104,9 +107,7 @@ if __name__ == "__main__":
    parser.add_argument(
        "path",
        nargs="?",
        help=(
            "The targeted path for update."
        ),
        help=("The targeted path for update."),
    )
    args = parser.parse_args()
    path = sys.argv[1]

@ -126,6 +127,8 @@ if __name__ == "__main__":
        except UnicodeDecodeError:
            pass
    if not_up_to_date:
        _LOGGER.error(f'Error: code snippets are out of sync. Please run Python python_snippet_updater.py "{path}" to fix it.')
        _LOGGER.error(
            f'Error: code snippets are out of sync. Please run Python python_snippet_updater.py "{path}" to fix it.'
        )
        exit(1)
    _LOGGER.info(f"README.md under {path} is up to date.")
@ -1,6 +1,12 @@
import os
import tempfile
from .python_snippet_updater import get_snippet, update_snippet, check_snippets, check_not_up_to_date
from ci_tools.snippet_update.python_snippet_updater import (
    get_snippet,
    update_snippet,
    check_snippets,
    check_not_up_to_date,
)


def test_update_snippet():
    temp_sample = tempfile.NamedTemporaryFile(delete=False)

@ -12,7 +18,7 @@ def test_update_snippet():
        return await pipeline.run(request)
    # [END trio]
    """
    temp_sample.write(snippets.encode('utf-8'))
    temp_sample.write(snippets.encode("utf-8"))
    temp_sample.close()
    full_path_sample = temp_sample.name
    get_snippet(full_path_sample)

@ -23,8 +29,11 @@ def test_update_snippet():
    snippet_name = list(keys)[0]

    temp_readme = tempfile.NamedTemporaryFile(delete=False)
    readme = """
<!-- SNIPPET:""" + snippet_name + """ -->
    readme = (
        """
<!-- SNIPPET:"""
        + snippet_name
        + """ -->

```python
import os

@ -38,10 +47,11 @@ text_analytics_client = TextAnalyticsClient(endpoint, AzureKeyCredential(key))

<!-- END SNIPPET -->
"""
    temp_readme.write(readme.encode('utf-8'))
    )
    temp_readme.write(readme.encode("utf-8"))
    temp_readme.close()
    update_snippet(temp_readme.name)
    with open(temp_readme.name, 'rb') as file:
    with open(temp_readme.name, "rb") as file:
        content = file.read()
    print(content)
@ -39,7 +39,11 @@ from typing import List


def path_excluded(path, additional_excludes):
    return any([excl in path for excl in additional_excludes]) or "tests" in os.path.normpath(path).split(os.sep) or is_metapackage(path)
    return (
        any([excl in path for excl in additional_excludes])
        or "tests" in os.path.normpath(path).split(os.sep)
        or is_metapackage(path)
    )


# Metapackages do not have an 'azure' folder within them
@ -15,7 +15,10 @@ from azure.core.exceptions import ResourceNotFoundError
from azure.core.pipeline.policies import ContentDecodePolicy

# the functions we patch
from azure.core.pipeline.transport import RequestsTransport
try:
    from azure.core.pipeline.transport import RequestsTransport
except:
    pass

from .helpers import get_test_id, is_live, is_live_and_not_recording
from .proxy_testcase import start_record_or_playback, stop_record_or_playback, transform_request
@ -13,7 +13,10 @@ from azure.core.exceptions import HttpResponseError, ResourceNotFoundError
from azure.core.pipeline.policies import ContentDecodePolicy

# the functions we patch
from azure.core.pipeline.transport import RequestsTransport
try:
    from azure.core.pipeline.transport import RequestsTransport
except:
    pass

# the trimming function to clean up incoming arguments to the test function we are wrapping
from azure_devtools.scenario_tests.utilities import trim_kwargs_from_test_function
@ -49,7 +49,7 @@ setup(
            "sdk_analyze_deps=ci_tools.dependency_analysis:analyze_dependencies",
            "sdk_find_invalid_versions=ci_tools.versioning.find_invalid_versions:find_invalid_versions_main",
            "sdk_verify_keywords=ci_tools.keywords_verify:entrypoint",
            "multiapi_combiner=packaging_tools.multiapi_combiner:combine",
            "multiapi_combiner=packaging_tools.multiapi_combiner:combine"
        ],
    },
    extras_require={
@@ -0,0 +1,16 @@
+# azure-sdk-tools `integration` tests
+
+## Integration with the repo
+
+These tests live in the `integration` folder because they are designed to run against _actual source from the repo_. Given that, they are guaranteed to be broken by real code changes eventually, which is exactly why they are kept separate here.
+
+See `test_package_discovery.py` or the `proxy` folder for examples.
+
+## Manually created scenarios
+
+The second category of `integration` tests are those whose scenario is created specifically for the test. These live under `scenarios`.
+
+Each folder within the `scenarios` folder is used by one or more integration tests.
+
+
+
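To make the layout described in that README concrete, here is a minimal sketch of how an integration test could resolve one of the folders under `scenarios`. This helper is illustrative only and not part of the commit; the constant and function names are hypothetical.

```python
import os

# Assumed layout, per the README above:
#   tests/integration/scenarios/<scenario_name>/...
SCENARIOS = os.path.join(os.path.dirname(__file__), "integration", "scenarios")


def scenario_path(name: str) -> str:
    """Return the absolute path of a scenario folder, e.g. 'complex_requirements'."""
    return os.path.join(SCENARIOS, name)
```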
@@ -0,0 +1,16 @@
+from setuptools import setup, find_packages
+
+version = "1.0.0b1"
+
+setup(
+    name="coretestserver",
+    version=version,
+    include_package_data=True,
+    description="A fake package that can be installed",
+    license="MIT License",
+    author="Microsoft Corporation",
+    packages=find_packages(),
+    install_requires=[
+        "flask==2.2.2",
+    ],
+)
Binary file tools/azure-sdk-tools/tests/integration/scenarios/complex_requirements/optional-environment-test-1.0.0.zip added (new file; binary content not shown).
@@ -0,0 +1,11 @@
+trio
+requests
+aiohttp>=3.0
+opencensus>=0.6.0
+opencensus-ext-azure
+opencensus-ext-threading
+-e ../../../../../../tools/azure-sdk-tools
+-e ../../../../../../tools/azure-devtools
+a/contained/package
+https://docsupport.blob.core.windows.net/repackaged/azure-core-1.5.0.zip
+./optional-environment-test-1.0.0.zip
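The scenario file above deliberately mixes requirement styles: plain PyPI specifiers, editable installs, a bare relative path, a remote archive URL, and a local zip. As a rough sketch only (no helper of this name exists in the repo), code that distinguishes these styles might look like:

```python
def classify_requirement(line: str) -> str:
    """Roughly classify one line of a requirements file by its install style."""
    line = line.strip()
    if line.startswith("-e "):
        return "editable install"  # e.g. -e ../../../../../../tools/azure-sdk-tools
    if line.startswith(("http://", "https://")):
        return "remote archive"  # e.g. a repackaged azure-core zip
    if line.startswith((".", "/")) or "/" in line:
        return "local path"  # e.g. ./optional-environment-test-1.0.0.zip
    return "PyPI specifier"  # e.g. aiohttp>=3.0
```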
@@ -0,0 +1,3 @@
+# Integration Scenario
+
+This scenario exercises the various options of the `optional` environment. Check the values present in `pyproject.toml` under `tool.azure-sdk-build.optional`. This folder contains only the scenario itself; the actual test cases live in `test_optional_functionality.py`.
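Each optional configuration is identified by its `name` key (see the `pyproject.toml` hunk further below). A hypothetical helper, shown only to illustrate how a single configuration could be selected by name, might be:

```python
from typing import List, Optional


def select_optional(configs: List[dict], name: Optional[str] = None) -> List[dict]:
    """Keep all optional configs, or only the one whose 'name' matches."""
    if name is None:
        return configs
    return [c for c in configs if c.get("name") == name]
```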
@@ -0,0 +1 @@
+__path__ = __import__("pkgutil").extend_path(__path__, __name__)
@@ -0,0 +1,6 @@
+-e ../../../../../../tools/azure-sdk-tools
+../../../../../../sdk/core/azure-core
+-e ../../../../../../tools/azure-devtools
+aiohttp
+requests
+-e .
@@ -0,0 +1,19 @@
+[tool.azure-sdk-build]
+mypy = true
+type_check_samples = true
+verifytypes = true
+pyright = true
+pylint = true
+black = true
+
+[[tool.azure-sdk-build.optional]]
+name = "no_requests"
+install = []
+uninstall = ["requests"]
+additional_pytest_args = ["-k", "*_async.py"]
+
+[[tool.azure-sdk-build.optional]]
+name = "no_aiohttp"
+install = []
+uninstall = ["aiohttp"]
+additional_pytest_args = ["-k", "not *_async.py"]
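For reference, here is a minimal sketch of loading these `[[tool.azure-sdk-build.optional]]` entries as plain dictionaries; this is not code from the commit. `tomllib` is in the standard library from Python 3.11, and the third-party `tomli` package exposes the same API on older versions:

```python
import tomllib  # Python 3.11+; on older versions: import tomli as tomllib


def read_optional_configs(pyproject_path: str) -> list:
    """Return the [[tool.azure-sdk-build.optional]] entries, or [] if absent."""
    with open(pyproject_path, "rb") as f:
        data = tomllib.load(f)
    return data.get("tool", {}).get("azure-sdk-build", {}).get("optional", [])


# Example: print which packages each optional configuration uninstalls.
for config in read_optional_configs("pyproject.toml"):
    print(config["name"], "->", config.get("uninstall", []))
```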
@@ -0,0 +1,78 @@
+#!/usr/bin/env python
+
+# -------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+# --------------------------------------------------------------------------
+
+from setuptools import setup, find_packages
+import os
+from io import open
+import re
+
+# example setup.py Feel free to copy the entire "azure-template" folder into a package folder named
+# with "azure-<yourpackagename>". Ensure that the below arguments to setup() are updated to reflect
+# your package.
+
+# this setup.py is set up in a specific way to keep the azure* and azure-mgmt-* namespaces WORKING all the way
+# up from python 2.7. Reference here: https://github.com/Azure/azure-sdk-for-python/blob/main/doc/dev/packaging.md
+
+PACKAGE_NAME = "optional-environment-test"
+
+# a-b-c => a/b/c
+package_folder_path = PACKAGE_NAME.replace("-", "/")
+# a-b-c => a.b.c
+namespace_name = PACKAGE_NAME.replace("-", ".")
+
+with open("README.md", encoding="utf-8") as f:
+    long_description = f.read()
+
+setup(
+    name=PACKAGE_NAME,
+    version='1.0.0',
+    description=PACKAGE_NAME,
+    # ensure that these are updated to reflect the package owners' information
+    long_description=long_description,
+    long_description_content_type="text/markdown",
+    url="https://github.com/Azure/azure-sdk-for-python",
+    keywords="azure, azure sdk",  # update with search keywords relevant to the azure service / product
+    author="Microsoft Corporation",
+    author_email="azuresdkengsysadmins@microsoft.com",
+    license="MIT License",
+    # ensure that the development status reflects the status of your package
+    classifiers=[
+        "Development Status :: 4 - Beta",
+        "Programming Language :: Python",
+        "Programming Language :: Python :: 3 :: Only",
+        "Programming Language :: Python :: 3",
+        "Programming Language :: Python :: 3.7",
+        "Programming Language :: Python :: 3.8",
+        "Programming Language :: Python :: 3.9",
+        "Programming Language :: Python :: 3.10",
+        "Programming Language :: Python :: 3.11",
+        "License :: OSI Approved :: MIT License",
+    ],
+    packages=find_packages(
+        exclude=[
+            "tests",
+            # Exclude packages that will be covered by PEP420 or nspkg
+            # This means any folder structure that only consists of a __init__.py.
+            # For example, for storage, this would mean adding 'azure.storage'
+            # in addition to the default 'azure' that is seen here.
+            "azure",
+        ]
+    ),
+    include_package_data=True,
+    package_data={
+        "azure": ["py.typed"],
+    },
+    install_requires=[
+        "azure-core<2.0.0,>=1.10.0",
+    ],
+    python_requires=">=3.7",
+    project_urls={
+        "Bug Reports": "https://github.com/Azure/azure-sdk-for-python/issues",
+        "Source": "https://github.com/Azure/azure-sdk-python",
+    },
+)
@@ -0,0 +1,3 @@
+# Integration Scenario
+
+This scenario exercises the various options of the `optional` environment. Check the values present in `pyproject.toml` under `tool.azure-sdk-build.optional`. This folder contains only the scenario itself; the actual test cases live in `test_optional_functionality.py`.
@@ -0,0 +1 @@
+__path__ = __import__("pkgutil").extend_path(__path__, __name__)
@ -0,0 +1,5 @@
|
|||
-e ../../../../../../tools/azure-sdk-tools
|
||||
../../../../../../sdk/core/azure-core
|
||||
-e ../../../../../../tools/azure-devtools
|
||||
aiohttp
|
||||
requests
|
|
@@ -0,0 +1,7 @@
+[tool.azure-sdk-build]
+mypy = true
+type_check_samples = true
+verifytypes = true
+pyright = true
+pylint = true
+black = true
@@ -0,0 +1,78 @@
+#!/usr/bin/env python
+
+# -------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+# --------------------------------------------------------------------------
+
+from setuptools import setup, find_packages
+import os
+from io import open
+import re
+
+# example setup.py Feel free to copy the entire "azure-template" folder into a package folder named
+# with "azure-<yourpackagename>". Ensure that the below arguments to setup() are updated to reflect
+# your package.
+
+# this setup.py is set up in a specific way to keep the azure* and azure-mgmt-* namespaces WORKING all the way
+# up from python 2.7. Reference here: https://github.com/Azure/azure-sdk-for-python/blob/main/doc/dev/packaging.md
+
+PACKAGE_NAME = "optional-environment-test"
+
+# a-b-c => a/b/c
+package_folder_path = PACKAGE_NAME.replace("-", "/")
+# a-b-c => a.b.c
+namespace_name = PACKAGE_NAME.replace("-", ".")
+
+with open("README.md", encoding="utf-8") as f:
+    long_description = f.read()
+
+setup(
+    name=PACKAGE_NAME,
+    version='1.0.0',
+    description=PACKAGE_NAME,
+    # ensure that these are updated to reflect the package owners' information
+    long_description=long_description,
+    long_description_content_type="text/markdown",
+    url="https://github.com/Azure/azure-sdk-for-python",
+    keywords="azure, azure sdk",  # update with search keywords relevant to the azure service / product
+    author="Microsoft Corporation",
+    author_email="azuresdkengsysadmins@microsoft.com",
+    license="MIT License",
+    # ensure that the development status reflects the status of your package
+    classifiers=[
+        "Development Status :: 4 - Beta",
+        "Programming Language :: Python",
+        "Programming Language :: Python :: 3 :: Only",
+        "Programming Language :: Python :: 3",
+        "Programming Language :: Python :: 3.7",
+        "Programming Language :: Python :: 3.8",
+        "Programming Language :: Python :: 3.9",
+        "Programming Language :: Python :: 3.10",
+        "Programming Language :: Python :: 3.11",
+        "License :: OSI Approved :: MIT License",
+    ],
+    packages=find_packages(
+        exclude=[
+            "tests",
+            # Exclude packages that will be covered by PEP420 or nspkg
+            # This means any folder structure that only consists of a __init__.py.
+            # For example, for storage, this would mean adding 'azure.storage'
+            # in addition to the default 'azure' that is seen here.
+            "azure",
+        ]
+    ),
+    include_package_data=True,
+    package_data={
+        "azure": ["py.typed"],
+    },
+    install_requires=[
+        "azure-core<2.0.0,>=1.10.0",
+    ],
+    python_requires=">=3.7",
+    project_urls={
+        "Bug Reports": "https://github.com/Azure/azure-sdk-for-python/issues",
+        "Source": "https://github.com/Azure/azure-sdk-python",
+    },
+)
@@ -8,23 +8,6 @@ repo_root = os.path.abspath(os.path.join(os.path.dirname(__file__), "..", "..",
 core_service_root = os.path.join(repo_root, "sdk", "core")
 storage_service_root = os.path.join(repo_root, "sdk", "storage")


-def test_toml_result():
-    package_with_toml = os.path.join(core_service_root, "azure-core")
-
-    parsed_setup = ParsedSetup.from_path(package_with_toml)
-    result = parsed_setup.get_build_config()
-
-    expected = {
-        "mypy": True,
-        "type_check_samples": True,
-        "verifytypes": True,
-        "pyright": False,
-    }
-
-    assert expected == result
-
-
 def test_discovery():
     results = discover_targeted_packages("azure*", core_service_root)
@@ -34,7 +17,6 @@ def test_discovery():
     assert len(results) > 1
     assert len(non_empty_results) == 1
-


 def test_discovery_omit_mgmt():
     results = discover_targeted_packages("azure*", storage_service_root, filter_type="Omit_management")
@@ -0,0 +1,67 @@
+import os
+import pytest
+
+from ci_tools.parsing import ParsedSetup
+from ci_tools.functions import get_config_setting
+from ci_tools.scenario.generation import create_scenario_file
+
+integration_folder = os.path.join(os.path.dirname(__file__), 'integration')
+
+def test_toml_result():
+    package_with_toml = os.path.join(integration_folder, 'scenarios', 'optional_environment_two_options')
+    parsed_setup = ParsedSetup.from_path(package_with_toml)
+    actual = parsed_setup.get_build_config()
+
+    expected = {
+        'mypy': True,
+        'type_check_samples': True,
+        'verifytypes': True,
+        'pyright': True,
+        'pylint': True,
+        'black': True,
+        'optional': [
+            {
+                'name': 'no_requests',
+                'install': [],
+                'uninstall': ['requests'],
+                'additional_pytest_args': ['-k', '*_async.py']
+            },
+            {
+                'name': 'no_aiohttp',
+                'install': [],
+                'uninstall': ['aiohttp'],
+                'additional_pytest_args': ['-k', 'not *_async.py']
+            }
+        ]
+    }
+
+    assert actual == expected
+
+
+def test_optional_specific_get():
+    package_with_toml = os.path.join(integration_folder, 'scenarios', 'optional_environment_two_options')
+    actual = get_config_setting(package_with_toml, 'optional')
+    expected = [
+        {
+            'name': 'no_requests',
+            'install': [],
+            'uninstall': ['requests'],
+            'additional_pytest_args': ['-k', '*_async.py']
+        },
+        {
+            'name': 'no_aiohttp',
+            'install': [],
+            'uninstall': ['aiohttp'],
+            'additional_pytest_args': ['-k', 'not *_async.py']
+        }
+    ]
+
+    assert expected == actual
+
+
+def test_optional_specific_get_no_result():
+    package_with_toml = os.path.join(integration_folder, 'scenarios', 'optional_environment_zero_options')
+    actual = get_config_setting(package_with_toml, 'optional', None)
+    expected = None
+
+    assert expected == actual
@@ -1,10 +1,12 @@
 from ci_tools.parsing import parse_require, ParsedSetup
 from packaging.specifiers import SpecifierSet
 import os
+import pdb
+from unittest.mock import patch

 import pytest

 package_root = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
 test_folder = os.path.join(os.path.dirname(__file__), )

 def test_parse_require():
     test_scenarios = [
@@ -200,4 +202,4 @@ setup(
     assert result.ext_package == "azure.storage.extensions"
     assert result.ext_modules is not None
     assert len(result.ext_modules) == 1
-    assert str(type(result.ext_modules[0])) == "<class 'setuptools.extension.Extension'>"
\ No newline at end of file
+    assert str(type(result.ext_modules[0])) == "<class 'setuptools.extension.Extension'>"
@@ -4,7 +4,7 @@ import os, tempfile, shutil
 import pytest

 from typing import List
-from ci_tools.functions import replace_dev_reqs
+from ci_tools.generation import replace_dev_reqs

 integration_folder = os.path.join(os.path.dirname(__file__), "integration")
 sample_dev_reqs_folder = os.path.join(integration_folder, "scenarios", "dev_requirement_samples")