From adde17bdff6393e6842743634d29a2aff2f01cfa Mon Sep 17 00:00:00 2001
From: Scott Beddall
Date: Sun, 16 Nov 2025 07:10:36 +0000
Subject: [PATCH 01/76] initial migration

---
 argfile.json                                  |  22 ---
 .../azpysdk/copilot-instructions.md           |  13 ++
 eng/tools/azure-sdk-tools/azpysdk/main.py     |   2 +
 eng/tools/azure-sdk-tools/azpysdk/whl.py      | 180 ++++++++++++------
 4 files changed, 140 insertions(+), 77 deletions(-)
 delete mode 100644 argfile.json
 create mode 100644 eng/tools/azure-sdk-tools/azpysdk/copilot-instructions.md

diff --git a/argfile.json b/argfile.json
deleted file mode 100644
index add92aa30a91..000000000000
--- a/argfile.json
+++ /dev/null
@@ -1,22 +0,0 @@
-[
-    {
-        "name": "azure-core",
-        "common_root": "azure",
-        "service": "core",
-        "in_batch": "True",
-        "checkout": [
-            {
-                "package": "azure-core",
-                "version": "1.32.0"
-            },
-            {
-                "package": "azure-mgmt-core",
-                "version": "1.5.0"
-            },
-            {
-                "package": "azure-common",
-                "version": "1.1.28"
-            }
-        ]
-    }
-]
\ No newline at end of file
diff --git a/eng/tools/azure-sdk-tools/azpysdk/copilot-instructions.md b/eng/tools/azure-sdk-tools/azpysdk/copilot-instructions.md
new file mode 100644
index 000000000000..ccead422a8e6
--- /dev/null
+++ b/eng/tools/azure-sdk-tools/azpysdk/copilot-instructions.md
@@ -0,0 +1,13 @@
+# Copilot check migration instructions
+
+This document provides instructions for migrating an existing `tox.ini` check environment to the new check format used by the Azure SDK Tools.
+
+1. Identify a check from `tox.ini`.
+2. Given that check, read its environment config from `eng/tox/tox.ini`.
+3. Copy the relevant configuration options from `tox.ini` to a new check file located at `eng/tools/azure-sdk-tools/azpysdk/<check_name>.py`.
+4. Ensure that within the new file, a class inheriting from `Check` is created, and that the flow of said check looks similar to any of the existing checks in this directory: `next_mpy.py`, `import_all.py`. (A minimal sketch follows this list.)
+5. Import and register the new check within `main.py` located in this directory.
+6. For any custom commands in the `tox.ini` file, decide whether to pull the reference implementation over here, or to remove them if they are no longer necessary.
+7. Test the new check by running it with `azpysdk whl --isolate azure-template`.
+8. Ensure that environment variables are set if they are present in the original `tox.ini` file. Migrate the requirements that `tox.ini` would install into calls to `Check.install_into_venv` so that the check has the necessary dependencies.
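+
+A minimal sketch of the class described in step 4 (the name `my_check` and its method bodies are illustrative; mirror the real signatures used by `next_mpy.py` or `import_all.py`):
+
+```python
+import argparse
+import sys
+from typing import List, Optional
+
+from .Check import Check
+
+
+class my_check(Check):
+    def register(
+        self, subparsers: "argparse._SubParsersAction", parent_parsers: Optional[List[argparse.ArgumentParser]] = None
+    ) -> None:
+        p = subparsers.add_parser("my_check", parents=parent_parsers or [], help="Run the my_check check")
+        p.set_defaults(func=self.run)
+
+    def run(self, args: argparse.Namespace) -> int:
+        # resolve the targeted packages, then run the actual check logic per package
+        for parsed in self.get_targeted_directories(args):
+            executable, _ = self.get_executable(args.isolate, args.command, sys.executable, parsed.folder)
+            # ... perform the check against parsed.folder using `executable` ...
+        return 0
+```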
+
diff --git a/eng/tools/azure-sdk-tools/azpysdk/main.py b/eng/tools/azure-sdk-tools/azpysdk/main.py
index dd8fac51bf79..92ba31310b6a 100644
--- a/eng/tools/azure-sdk-tools/azpysdk/main.py
+++ b/eng/tools/azure-sdk-tools/azpysdk/main.py
@@ -25,6 +25,7 @@
 from .ruff import ruff
 from .verifytypes import verifytypes
 from .verify_sdist import verify_sdist
+from .whl import whl
 from .verify_whl import verify_whl
 from .bandit import bandit
 from .verify_keywords import verify_keywords
@@ -85,6 +86,7 @@ def build_parser() -> argparse.ArgumentParser:
     ruff().register(subparsers, [common])
     verifytypes().register(subparsers, [common])
     verify_sdist().register(subparsers, [common])
+    whl().register(subparsers, [common])
     verify_whl().register(subparsers, [common])
     bandit().register(subparsers, [common])
     verify_keywords().register(subparsers, [common])
diff --git a/eng/tools/azure-sdk-tools/azpysdk/whl.py b/eng/tools/azure-sdk-tools/azpysdk/whl.py
index f9a59bc509e8..792f597f9eb4 100644
--- a/eng/tools/azure-sdk-tools/azpysdk/whl.py
+++ b/eng/tools/azure-sdk-tools/azpysdk/whl.py
@@ -1,18 +1,29 @@
 import argparse
-import tempfile
 import os
-from typing import Optional, List, Any
 import sys
-from subprocess import run
+from subprocess import CalledProcessError
+from typing import List, Optional
 
 from .Check import Check
 
 from ci_tools.functions import is_error_code_5_allowed, install_into_venv
-from ci_tools.variables import set_envvar_defaults
-from ci_tools.parsing import ParsedSetup
 from ci_tools.scenario.generation import create_package_and_install
+from ci_tools.variables import discover_repo_root, set_envvar_defaults
 from ci_tools.logging import logger
 
+REPO_ROOT = discover_repo_root()
+
+PACKAGING_REQUIREMENTS = [
+    "wheel==0.45.1",
+    "packaging==24.2",
+    "urllib3==2.2.3",
+    "tomli==2.2.1",
+    "build==1.2.2.post1",
+    "pkginfo==1.12.1.2",
+]
+
+TEST_TOOLS_REQUIREMENTS = os.path.join(REPO_ROOT, "eng/test_tools.txt")
+
 
 class whl(Check):
     def __init__(self) -> None:
@@ -27,7 +38,11 @@ def register(
         parents = parent_parsers or []
         p = subparsers.add_parser("whl", parents=parents, help="Run the whl check")
         p.set_defaults(func=self.run)
-        # TODO add mark_args, and other parameters
+        p.add_argument(
+            "--pytest-args",
+            nargs=argparse.REMAINDER,
+            help="Additional arguments forwarded to pytest.",
+        )
 
     def run(self, args: argparse.Namespace) -> int:
         """Run the whl check command."""
@@ -36,55 +51,110 @@ def run(self, args: argparse.Namespace) -> int:
         set_envvar_defaults()
 
         targeted = self.get_targeted_directories(args)
+        if not targeted:
+            logger.warning("No target packages discovered for whl check.")
+            return 0
 
-        results: List[int] = []
+        overall_result = 0
 
         for parsed in targeted:
-            pkg = parsed.folder
-            executable, staging_directory = self.get_executable(args.isolate, args.command, sys.executable, pkg)
-
-            logger.info(f"Invoking check with {executable}")
-
-            self.install_dev_reqs(executable, args, pkg)
-
-            create_package_and_install(
-                distribution_directory=staging_directory,
-                target_setup=pkg,
-                skip_install=False,
-                cache_dir=None,
-                work_dir=staging_directory,
-                force_create=False,
-                package_type="wheel",
-                pre_download_disabled=False,
-                python_executable=executable,
-            )
-
-            # TODO: split sys.argv[1:] on -- and pass in everything after the -- as additional arguments
-            # TODO: handle mark_args
-            logger.info(f"Invoke pytest for {pkg}")
-            exit_code = run(
-                [executable, "-m", "pytest", "."]
-                + [
-                    "-rsfE",
-                    f"--junitxml={pkg}/test-junit-{args.command}.xml",
-                    "--verbose",
-                    "--cov-branch",
-                    "--durations=10",
-                    "--ignore=azure",
-                    "--ignore-glob=.venv*",
-                    "--ignore=build",
-                    "--ignore=.eggs",
-                    "--ignore=samples",
-                ],
-                cwd=pkg,
-            ).returncode
-
-            if exit_code != 0:
-                if exit_code == 5 and is_error_code_5_allowed(parsed.folder, parsed.name):
-                    logger.info("Exit code 5 is allowed, continuing execution.")
-                else:
-                    logger.info(f"pytest failed with exit code {exit_code}.")
-                    results.append(exit_code)
-
-        # final result is the worst case of all the results
-        return max(results)
+            package_dir = parsed.folder
+            package_name = parsed.name
+
+            executable, staging_directory = self.get_executable(args.isolate, args.command, sys.executable, package_dir)
+            logger.info(f"Processing {package_name} using interpreter {executable}")
+
+            try:
+                self._install_common_requirements(executable, package_dir)
+                self.install_dev_reqs(executable, args, package_dir)
+            except CalledProcessError as exc:
+                logger.error(f"Failed to install dependencies for {package_name}: {exc}")
+                overall_result = max(overall_result, exc.returncode or 1)
+                continue
+
+            try:
+                create_package_and_install(
+                    distribution_directory=staging_directory,
+                    target_setup=package_dir,
+                    skip_install=False,
+                    cache_dir=None,
+                    work_dir=staging_directory,
+                    force_create=False,
+                    package_type="wheel",
+                    pre_download_disabled=False,
+                    python_executable=executable,
+                )
+            except CalledProcessError as exc:
+                logger.error(f"Failed to build/install wheel for {package_name}: {exc}")
+                overall_result = max(overall_result, exc.returncode or 1)
+                continue
+
+            pytest_args = self._build_pytest_args(package_dir, args)
+            pytest_command = ["-m", "pytest", *pytest_args]
+            pytest_result = self.run_venv_command(executable, pytest_command, cwd=package_dir)
+
+            if pytest_result.returncode != 0:
+                if pytest_result.returncode == 5 and is_error_code_5_allowed(package_dir, package_name):
+                    logger.info(
+                        "pytest exited with code 5 for %s, which is allowed for management or opt-out packages.",
+                        package_name,
+                    )
+                    # Align with tox: skip coverage when tests are skipped entirely
+                    continue
+
+                logger.error(f"pytest failed for {package_name} with exit code {pytest_result.returncode}.")
+                if pytest_result.stdout:
+                    logger.error(pytest_result.stdout)
+                if pytest_result.stderr:
+                    logger.error(pytest_result.stderr)
+                overall_result = max(overall_result, pytest_result.returncode)
+                continue
+
+            coverage_command = [
+                os.path.join(REPO_ROOT, "eng/tox/run_coverage.py"),
+                "-t",
+                package_dir,
+                "-r",
+                REPO_ROOT,
+            ]
+            coverage_result = self.run_venv_command(executable, coverage_command, cwd=package_dir)
+            if coverage_result.returncode != 0:
+                logger.error(f"Coverage generation failed for {package_name} with exit code {coverage_result.returncode}.")
+                if coverage_result.stdout:
+                    logger.error(coverage_result.stdout)
+                if coverage_result.stderr:
+                    logger.error(coverage_result.stderr)
+                overall_result = max(overall_result, coverage_result.returncode)
+
+        return overall_result
+
+    def _install_common_requirements(self, executable: str, package_dir: str) -> None:
+        if PACKAGING_REQUIREMENTS:
+            install_into_venv(executable, PACKAGING_REQUIREMENTS, package_dir)
+
+        if os.path.exists(TEST_TOOLS_REQUIREMENTS):
+            install_into_venv(executable, ["-r", TEST_TOOLS_REQUIREMENTS], package_dir)
+        else:
+            logger.warning(f"Test tools requirements file not found at {TEST_TOOLS_REQUIREMENTS}.")
+
+    def _build_pytest_args(self, package_dir: str, args: argparse.Namespace) -> List[str]:
+        log_level = os.getenv("PYTEST_LOG_LEVEL", "51")
+        junit_path = os.path.join(package_dir, f"test-junit-{args.command}.xml")
+
+ default_args = [ + "-rsfE", + f"--junitxml={junit_path}", + "--verbose", + "--cov-branch", + "--durations=10", + "--ignore=azure", + "--ignore=.tox", + "--ignore=build", + "--ignore=.eggs", + "--ignore=samples", + f"--log-cli-level={log_level}", + ] + + additional = args.pytest_args if args.pytest_args else [] + + return [*default_args, *additional, package_dir] From 7574af1c6dee64e117074d60e5d964515cecd6f0 Mon Sep 17 00:00:00 2001 From: Scott Beddall Date: Sun, 16 Nov 2025 07:28:09 +0000 Subject: [PATCH 02/76] well it works locally lets see what melts uptop --- .../templates/steps/build-package-artifacts.yml | 16 ++++++++++++++++ eng/tools/azure-sdk-tools/azpysdk/Check.py | 12 ++++++++++-- eng/tools/azure-sdk-tools/azpysdk/whl.py | 4 ++-- eng/tools/azure-sdk-tools/ci_tools/variables.py | 5 ++++- 4 files changed, 32 insertions(+), 5 deletions(-) diff --git a/eng/pipelines/templates/steps/build-package-artifacts.yml b/eng/pipelines/templates/steps/build-package-artifacts.yml index aae7b4262152..a5c408111372 100644 --- a/eng/pipelines/templates/steps/build-package-artifacts.yml +++ b/eng/pipelines/templates/steps/build-package-artifacts.yml @@ -131,6 +131,22 @@ steps: displayName: 'Verify Readme' condition: and(succeededOrFailed(), eq(variables['Agent.OS'], 'Linux')) + - task: PythonScript@0 + displayName: 'Run Early Whl Check' + inputs: + scriptPath: 'eng/scripts/dispatch_checks.py' + arguments: >- + "$(TargetingString)" + --service="${{ parameters.ServiceDirectory }}" + --checks="whl" + --filter-type="None" + ${{ parameters.AdditionalTestArgs }} + env: + TOX_PIP_IMPL: "uv" + VIRTUAL_ENV: "" + PYTHONHOME: "" + condition: and(succeededOrFailed(), ne(variables['Skip.Whl'],'true')) + - ${{ parameters.BeforePublishSteps }} # we need to publish an empty artifact. when publishing an empty artifact, let's ensure that there isn't anything to diff --git a/eng/tools/azure-sdk-tools/azpysdk/Check.py b/eng/tools/azure-sdk-tools/azpysdk/Check.py index bb395211e103..0329eab2a927 100644 --- a/eng/tools/azure-sdk-tools/azpysdk/Check.py +++ b/eng/tools/azure-sdk-tools/azpysdk/Check.py @@ -110,7 +110,13 @@ def get_executable(self, isolate: bool, check_name: str, executable: str, packag return executable, staging_directory def run_venv_command( - self, executable: str, command: Sequence[str], cwd: str, check: bool = False, append_executable: bool = True + self, + executable: str, + command: Sequence[str], + cwd: str, + check: bool = False, + append_executable: bool = True, + immediately_dump: bool = False ) -> subprocess.CompletedProcess[str]: """Run a command in the given virtual environment. 
- Prepends the virtual environment's bin directory to the PATH environment variable (if one exists) @@ -149,8 +155,10 @@ def run_venv_command( logger.debug(f"VIRTUAL_ENV: {env['VIRTUAL_ENV']}.") logger.debug(f"PATH : {env['PATH']}.") + s_out = None if immediately_dump else subprocess.PIPE + s_err = None if immediately_dump else subprocess.PIPE result = subprocess.run( - cmd_to_run, cwd=cwd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True, check=check, env=env + cmd_to_run, cwd=cwd, stdout=s_out, stderr=s_err, text=True, check=check, env=env ) return result diff --git a/eng/tools/azure-sdk-tools/azpysdk/whl.py b/eng/tools/azure-sdk-tools/azpysdk/whl.py index 792f597f9eb4..bad9b4d41e56 100644 --- a/eng/tools/azure-sdk-tools/azpysdk/whl.py +++ b/eng/tools/azure-sdk-tools/azpysdk/whl.py @@ -48,7 +48,7 @@ def run(self, args: argparse.Namespace) -> int: """Run the whl check command.""" logger.info("Running whl check...") - set_envvar_defaults() + set_envvar_defaults({"PROXY_URL": "http://localhost:5001"}) targeted = self.get_targeted_directories(args) if not targeted: @@ -91,7 +91,7 @@ def run(self, args: argparse.Namespace) -> int: pytest_args = self._build_pytest_args(package_dir, args) pytest_command = ["-m", "pytest", *pytest_args] - pytest_result = self.run_venv_command(executable, pytest_command, cwd=package_dir) + pytest_result = self.run_venv_command(executable, pytest_command, cwd=package_dir, immediately_dump=True) if pytest_result.returncode != 0: if pytest_result.returncode == 5 and is_error_code_5_allowed(package_dir, package_name): diff --git a/eng/tools/azure-sdk-tools/ci_tools/variables.py b/eng/tools/azure-sdk-tools/ci_tools/variables.py index 6e8731d596ab..4c1bf8083dbe 100644 --- a/eng/tools/azure-sdk-tools/ci_tools/variables.py +++ b/eng/tools/azure-sdk-tools/ci_tools/variables.py @@ -121,10 +121,13 @@ def set_environment_from_dictionary(settings: Dict[str, str]) -> None: os.environ.setdefault(key, value) -def set_envvar_defaults() -> None: +def set_envvar_defaults(additional: Dict[str, str] = {}) -> None: """ Sets default environment variables for any given process to our default dictionary. Args: settings (Dict[str, str]): A dictionary of environment variable names and their default values. 
""" set_environment_from_dictionary(DEFAULT_ENVIRONMENT_VARIABLES) + + # this will override any defaults set prior in the case of override + set_environment_from_dictionary(additional) From 0315a9ab17618aad0df72179887f1ecfc9d36475 Mon Sep 17 00:00:00 2001 From: Scott Beddall Date: Sun, 16 Nov 2025 07:34:18 +0000 Subject: [PATCH 03/76] fix parameter --- eng/pipelines/templates/steps/build-package-artifacts.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/eng/pipelines/templates/steps/build-package-artifacts.yml b/eng/pipelines/templates/steps/build-package-artifacts.yml index a5c408111372..72693804e8cf 100644 --- a/eng/pipelines/templates/steps/build-package-artifacts.yml +++ b/eng/pipelines/templates/steps/build-package-artifacts.yml @@ -140,7 +140,7 @@ steps: --service="${{ parameters.ServiceDirectory }}" --checks="whl" --filter-type="None" - ${{ parameters.AdditionalTestArgs }} + --wheel_dir="$(Build.ArtifactStagingDirectory)" env: TOX_PIP_IMPL: "uv" VIRTUAL_ENV: "" From 6b4f79b822bc9ec9eb2f72988706653caddc3949 Mon Sep 17 00:00:00 2001 From: Scott Beddall Date: Tue, 18 Nov 2025 00:28:17 +0000 Subject: [PATCH 04/76] apply settings --- .../templates/steps/build-package-artifacts.yml | 16 ---------------- .../azpysdk/copilot-instructions.md | 13 ------------- eng/tools/azure-sdk-tools/azpysdk/whl.py | 13 +++++-------- eng/tools/azure-sdk-tools/ci_tools/variables.py | 7 ++++--- 4 files changed, 9 insertions(+), 40 deletions(-) delete mode 100644 eng/tools/azure-sdk-tools/azpysdk/copilot-instructions.md diff --git a/eng/pipelines/templates/steps/build-package-artifacts.yml b/eng/pipelines/templates/steps/build-package-artifacts.yml index 72693804e8cf..aae7b4262152 100644 --- a/eng/pipelines/templates/steps/build-package-artifacts.yml +++ b/eng/pipelines/templates/steps/build-package-artifacts.yml @@ -131,22 +131,6 @@ steps: displayName: 'Verify Readme' condition: and(succeededOrFailed(), eq(variables['Agent.OS'], 'Linux')) - - task: PythonScript@0 - displayName: 'Run Early Whl Check' - inputs: - scriptPath: 'eng/scripts/dispatch_checks.py' - arguments: >- - "$(TargetingString)" - --service="${{ parameters.ServiceDirectory }}" - --checks="whl" - --filter-type="None" - --wheel_dir="$(Build.ArtifactStagingDirectory)" - env: - TOX_PIP_IMPL: "uv" - VIRTUAL_ENV: "" - PYTHONHOME: "" - condition: and(succeededOrFailed(), ne(variables['Skip.Whl'],'true')) - - ${{ parameters.BeforePublishSteps }} # we need to publish an empty artifact. when publishing an empty artifact, let's ensure that there isn't anything to diff --git a/eng/tools/azure-sdk-tools/azpysdk/copilot-instructions.md b/eng/tools/azure-sdk-tools/azpysdk/copilot-instructions.md deleted file mode 100644 index ccead422a8e6..000000000000 --- a/eng/tools/azure-sdk-tools/azpysdk/copilot-instructions.md +++ /dev/null @@ -1,13 +0,0 @@ -# copilot check migration instructions - -This document provides instructions on how to migrate existing Copilot check configurations to the new format used by the Azure SDK Tools. - -1. identify a check from `tox.ini`. -2. Given that `tox.ini`, read the `tox.ini` config for that environment from `eng/tox/tox.ini` -3. Copy the relevant configuration options from `tox.ini` to a new check configuration file located at `eng/tools/azure-sdk-tools/azpysdk/.py`. -4. Ensure that within the new file, a class inheriting from `Check` is created, and the flow of said check looks similar to any of the existing checks in this directory: `next_mpy.py`, `import_all.py`. -5. 
Import and register the new check within `main.py` located in this directory. -6. Make a decision for any of the custom commands in the `tox.ini` file to either pull the rference iimpoement over here, or to remove them if they are no longer necessary. -7. Test the new check by running it with `azpysdk whl --isolate azure-template` -8. Ensure that environment variables are set if they are in the original tox.ini file. migrate the requirements that would get installed in the tox.ini file to calls to Check.install_into_venv to ensure that the check has the necessary dependencies. - diff --git a/eng/tools/azure-sdk-tools/azpysdk/whl.py b/eng/tools/azure-sdk-tools/azpysdk/whl.py index bad9b4d41e56..fbd7850f8e5c 100644 --- a/eng/tools/azure-sdk-tools/azpysdk/whl.py +++ b/eng/tools/azure-sdk-tools/azpysdk/whl.py @@ -103,11 +103,6 @@ def run(self, args: argparse.Namespace) -> int: continue logger.error(f"pytest failed for {package_name} with exit code {pytest_result.returncode}.") - if pytest_result.stdout: - logger.error(pytest_result.stdout) - if pytest_result.stderr: - logger.error(pytest_result.stderr) - overall_result = max(overall_result, pytest_result.returncode) continue coverage_command = [ @@ -119,7 +114,9 @@ def run(self, args: argparse.Namespace) -> int: ] coverage_result = self.run_venv_command(executable, coverage_command, cwd=package_dir) if coverage_result.returncode != 0: - logger.error(f"Coverage generation failed for {package_name} with exit code {coverage_result.returncode}.") + logger.error( + f"Coverage generation failed for {package_name} with exit code {coverage_result.returncode}." + ) if coverage_result.stdout: logger.error(coverage_result.stdout) if coverage_result.stderr: @@ -129,8 +126,7 @@ def run(self, args: argparse.Namespace) -> int: return overall_result def _install_common_requirements(self, executable: str, package_dir: str) -> None: - if PACKAGING_REQUIREMENTS: - install_into_venv(executable, PACKAGING_REQUIREMENTS, package_dir) + install_into_venv(executable, PACKAGING_REQUIREMENTS, package_dir) if os.path.exists(TEST_TOOLS_REQUIREMENTS): install_into_venv(executable, ["-r", TEST_TOOLS_REQUIREMENTS], package_dir) @@ -149,6 +145,7 @@ def _build_pytest_args(self, package_dir: str, args: argparse.Namespace) -> List "--durations=10", "--ignore=azure", "--ignore=.tox", + "--ignore-glob=.venv*", "--ignore=build", "--ignore=.eggs", "--ignore=samples", diff --git a/eng/tools/azure-sdk-tools/ci_tools/variables.py b/eng/tools/azure-sdk-tools/ci_tools/variables.py index 4c1bf8083dbe..89dce671cd8a 100644 --- a/eng/tools/azure-sdk-tools/ci_tools/variables.py +++ b/eng/tools/azure-sdk-tools/ci_tools/variables.py @@ -121,7 +121,7 @@ def set_environment_from_dictionary(settings: Dict[str, str]) -> None: os.environ.setdefault(key, value) -def set_envvar_defaults(additional: Dict[str, str] = {}) -> None: +def set_envvar_defaults(settings: Optional[Dict[str, str]] = None) -> None: """ Sets default environment variables for any given process to our default dictionary. 
     Args:
@@ -129,5 +129,6 @@ def set_envvar_defaults(additional: Dict[str, str] = {}) -> None:
     """
     set_environment_from_dictionary(DEFAULT_ENVIRONMENT_VARIABLES)
 
-    # this will override any defaults set prior in the case of override
-    set_environment_from_dictionary(additional)
+    if settings:
+        # this will override any defaults set prior in the case of override
+        set_environment_from_dictionary(settings)

From 9cc2330e13c615a37113bd2a3b0e863df518280d Mon Sep 17 00:00:00 2001
From: Scott Beddall
Date: Tue, 18 Nov 2025 00:29:34 +0000
Subject: [PATCH 05/76] apply formatting

---
 eng/tools/azure-sdk-tools/azpysdk/Check.py | 6 ++----
 1 file changed, 2 insertions(+), 4 deletions(-)

diff --git a/eng/tools/azure-sdk-tools/azpysdk/Check.py b/eng/tools/azure-sdk-tools/azpysdk/Check.py
index 0329eab2a927..212e089b82d0 100644
--- a/eng/tools/azure-sdk-tools/azpysdk/Check.py
+++ b/eng/tools/azure-sdk-tools/azpysdk/Check.py
@@ -116,7 +116,7 @@ def run_venv_command(
         cwd: str,
         check: bool = False,
         append_executable: bool = True,
-        immediately_dump: bool = False
+        immediately_dump: bool = False,
     ) -> subprocess.CompletedProcess[str]:
         """Run a command in the given virtual environment.
         - Prepends the virtual environment's bin directory to the PATH environment variable (if one exists)
@@ -157,9 +157,7 @@ def run_venv_command(
 
         s_out = None if immediately_dump else subprocess.PIPE
         s_err = None if immediately_dump else subprocess.PIPE
-        result = subprocess.run(
-            cmd_to_run, cwd=cwd, stdout=s_out, stderr=s_err, text=True, check=check, env=env
-        )
+        result = subprocess.run(cmd_to_run, cwd=cwd, stdout=s_out, stderr=s_err, text=True, check=check, env=env)
 
         return result

From 9c7709f5cb46b76d1a53763b61adb34097ec05f9 Mon Sep 17 00:00:00 2001
From: Scott Beddall
Date: Wed, 19 Nov 2025 23:16:14 +0000
Subject: [PATCH 06/76] utilize whl check for prs

---
 eng/pipelines/templates/steps/build-test.yml | 97 ++++++++++----------
 scripts/devops_tasks/set_tox_environment.py  |  3 +-
 2 files changed, 48 insertions(+), 52 deletions(-)

diff --git a/eng/pipelines/templates/steps/build-test.yml b/eng/pipelines/templates/steps/build-test.yml
index 8364a124cb4e..f0f2f1815f5f 100644
--- a/eng/pipelines/templates/steps/build-test.yml
+++ b/eng/pipelines/templates/steps/build-test.yml
@@ -44,6 +44,8 @@ steps:
       $(PIP_EXE) freeze
     displayName: 'Prep Environment'
 
+  # this needs to move under eng/scripts from scripts/devops_tasks/
+  # AND be renamed prior to merging any PR with this changeset
   - task: PythonScript@0
     displayName: 'Set Tox Environment'
     inputs:
@@ -64,8 +66,6 @@ steps:
 
   - ${{ parameters.BeforeTestSteps }}
 
-  - template: /eng/pipelines/templates/steps/seed-virtualenv-wheels.yml
-
   - ${{ if eq('true', parameters.UseFederatedAuth) }}:
     - task: AzurePowerShell@5
       displayName: Run Tests (AzurePowerShell@5)
@@ -79,6 +79,7 @@ steps:
         pwsh: true
         ScriptType: InlineScript
         Inline: >-
+          $env:TOX_PIP_IMPL="uv"
          $account = (Get-AzContext).Account;
           $env:AZURESUBSCRIPTION_CLIENT_ID = $account.Id;
           $env:AZURESUBSCRIPTION_TENANT_ID = $account.Tenants;
@@ -92,20 +93,20 @@ steps:
             $markArg = "${{ parameters.TestMarkArgument }}"
           }
 
-          python scripts/devops_tasks/dispatch_tox.py
+          python eng/scripts/dispatch_checks.py
             "$(TargetingString)"
             ${{ parameters.AdditionalTestArgs }}
             ${{ parameters.CoverageArg }}
             --mark_arg="$markArg"
             --service="${{ parameters.ServiceDirectory }}"
-            --toxenv="${{ parameters.ToxTestEnv }}"
+            --checks="${{ parameters.ToxTestEnv }}"
             --injected-packages="${{ parameters.InjectedPackages }}"
-            --tenvparallel="${{ parameters.ToxEnvParallel }}";
 
          Write-Host "Last exit code: $LASTEXITCODE";
           exit $LASTEXITCODE;
     - ${{ else }}:
       - pwsh: |
+          $env:TOX_PIP_IMPL="uv"
          Write-Host (Get-Command python).Source
 
           if ($env:TESTMARKARGUMENT) {
@@ -115,14 +116,13 @@ steps:
             $markArg = "${{ parameters.TestMarkArgument }}"
           }
 
-          python scripts/devops_tasks/dispatch_tox.py "$(TargetingString)" `
+          python eng/scripts/dispatch_checks.py "$(TargetingString)" `
             ${{ parameters.AdditionalTestArgs }} `
             ${{ parameters.CoverageArg }} `
             --mark_arg="$markArg" `
             --service="${{ parameters.ServiceDirectory }}" `
-            --toxenv="${{ parameters.ToxTestEnv }}" `
+            --checks="${{ parameters.ToxTestEnv }}"
             --injected-packages="${{ parameters.InjectedPackages }}" `
-            --tenvparallel="${{ parameters.ToxEnvParallel }}";
           exit $LASTEXITCODE;
         env: ${{ parameters.EnvVars }}
         displayName: Run Tests
@@ -147,49 +147,44 @@ steps:
     displayName: Report Coverage
     condition: and(succeeded(), ${{ parameters.RunCoverage }})
 
-  - ${{ if eq('true', parameters.UseFederatedAuth) }}:
-    - task: AzurePowerShell@5
-      displayName: Test Samples (AzurePowerShell@5)
-      condition: and(succeeded(), eq(variables['TestSamples'], 'true'))
-      env:
-        SYSTEM_ACCESSTOKEN: $(System.AccessToken)
-        # Enable samples tests that use DefaultAzureCredential to load the federated pipeline credential
-        AZURE_POD_IDENTITY_AUTHORITY_HOST: 'https://FakeAuthorityHost'
-        ${{ insert }}: ${{ parameters.EnvVars }}
-      inputs:
-        azureSubscription: azure-sdk-tests-public
-        azurePowerShellVersion: LatestVersion
-        pwsh: true
-        ScriptType: InlineScript
-        Inline: |
-          $account = (Get-AzContext).Account;
-          $env:AZURESUBSCRIPTION_CLIENT_ID = $account.Id;
-          $env:AZURESUBSCRIPTION_TENANT_ID = $account.Tenants;
-
-          Write-Host (Get-Command python).Source
-
-          python scripts/devops_tasks/dispatch_tox.py "$(TargetingString)" `
-            --service="${{ parameters.ServiceDirectory }}" `
-            --toxenv="samples"
-
-          Write-Host "Last exit code: $LASTEXITCODE";
-          exit $LASTEXITCODE;
-  - ${{ else }}:
-    - pwsh: |
-        if ($IsWindows) {
-          . $(VENV_LOCATION)/Scripts/Activate.ps1
-        }
-        else {
-          . $(VENV_LOCATION)/bin/activate.ps1
-        }
-        Write-Host (Get-Command python).Source
-        python scripts/devops_tasks/dispatch_tox.py "$(TargetingString)" `
-          --service="${{ parameters.ServiceDirectory }}" `
-          --toxenv="samples"
-        exit $LASTEXITCODE;
-      env: ${{ parameters.EnvVars }}
-      displayName: 'Test Samples'
-      condition: and(succeeded(), eq(variables['TestSamples'], 'true'))
+  # re-enable after migrating `samples`
+  # - ${{ if eq('true', parameters.UseFederatedAuth) }}:
+  #   - task: AzurePowerShell@5
+  #     displayName: Test Samples (AzurePowerShell@5)
+  #     condition: and(succeeded(), eq(variables['TestSamples'], 'true'))
+  #     env:
+  #       SYSTEM_ACCESSTOKEN: $(System.AccessToken)
+  #       # Enable samples tests that use DefaultAzureCredential to load the federated pipeline credential
+  #       AZURE_POD_IDENTITY_AUTHORITY_HOST: 'https://FakeAuthorityHost'
+  #       ${{ insert }}: ${{ parameters.EnvVars }}
+  #     inputs:
+  #       azureSubscription: azure-sdk-tests-public
+  #       azurePowerShellVersion: LatestVersion
+  #       pwsh: true
+  #       ScriptType: InlineScript
+  #       Inline: |
+  #         $account = (Get-AzContext).Account;
+  #         $env:AZURESUBSCRIPTION_CLIENT_ID = $account.Id;
+  #         $env:AZURESUBSCRIPTION_TENANT_ID = $account.Tenants;
+
+  #         Write-Host (Get-Command python).Source
+
+  #         python eng/scripts/dispatch_checks.py "$(TargetingString)" `
+  #           --service="${{ parameters.ServiceDirectory }}" `
+  #           --checks="samples"
+
+  #         Write-Host "Last exit code: $LASTEXITCODE";
+  #         exit $LASTEXITCODE;
+  # - ${{ else }}:
+  #   - pwsh: |
+  #       Write-Host (Get-Command python).Source
+  #       python eng/scripts/dispatch_checks.py "$(TargetingString)" `
+  #         --service="${{ parameters.ServiceDirectory }}" `
+  #         --checks="samples"
+  #       exit $LASTEXITCODE;
+  #     env: ${{ parameters.EnvVars }}
+  #     displayName: 'Test Samples'
+  #     condition: and(succeeded(), eq(variables['TestSamples'], 'true'))
 
   - task: PublishTestResults@2
     condition: always()
diff --git a/scripts/devops_tasks/set_tox_environment.py b/scripts/devops_tasks/set_tox_environment.py
index fa689640ee55..353ae4220308 100644
--- a/scripts/devops_tasks/set_tox_environment.py
+++ b/scripts/devops_tasks/set_tox_environment.py
@@ -20,7 +20,8 @@
     "mindependency",
     "whl_no_aio",
 ]
-PR_BUILD_SET = ["whl", "sdist", "mindependency"]
+# this branch is checking only whl for now. rest will follow as they migrate
+PR_BUILD_SET = ["whl"] #, "sdist", "mindependency"
 
 
 def resolve_devops_variable(var_value: str) -> List[str]:

From 97b39f6f4eb91009b17cba0a9571ec07c2fa4cb0 Mon Sep 17 00:00:00 2001
From: Scott Beddall
Date: Thu, 20 Nov 2025 00:48:34 +0000
Subject: [PATCH 07/76] fix the pipeline

---
 eng/pipelines/templates/steps/build-test.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/eng/pipelines/templates/steps/build-test.yml b/eng/pipelines/templates/steps/build-test.yml
index f0f2f1815f5f..c7c8867b63f0 100644
--- a/eng/pipelines/templates/steps/build-test.yml
+++ b/eng/pipelines/templates/steps/build-test.yml
@@ -121,7 +121,7 @@ steps:
             ${{ parameters.CoverageArg }} `
             --mark_arg="$markArg" `
             --service="${{ parameters.ServiceDirectory }}" `
-            --checks="${{ parameters.ToxTestEnv }}"
+            --checks="${{ parameters.ToxTestEnv }}" `
             --injected-packages="${{ parameters.InjectedPackages }}" `
           exit $LASTEXITCODE;
         env: ${{ parameters.EnvVars }}

From 4d35f37c9322452c42a335b79a115a103b00dbb2 Mon Sep 17 00:00:00 2001
From: Scott Beddall
Date: Thu, 20 Nov 2025 22:59:07 +0000
Subject: [PATCH 08/76] command was bleeding forward

---
 eng/pipelines/templates/steps/build-test.yml | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/eng/pipelines/templates/steps/build-test.yml b/eng/pipelines/templates/steps/build-test.yml
index c7c8867b63f0..c680c483e5d5 100644
--- a/eng/pipelines/templates/steps/build-test.yml
+++ b/eng/pipelines/templates/steps/build-test.yml
@@ -67,7 +67,6 @@ steps:
 
   - ${{ parameters.BeforeTestSteps }}
 
   - ${{ if eq('true', parameters.UseFederatedAuth) }}:
-
     - task: AzurePowerShell@5
       displayName: Run Tests (AzurePowerShell@5)
       env:
@@ -121,7 +121,7 @@ steps:
             --mark_arg="$markArg" `
             --service="${{ parameters.ServiceDirectory }}" `
             --checks="${{ parameters.ToxTestEnv }}" `
-            --injected-packages="${{ parameters.InjectedPackages }}" `
+            --injected-packages="${{ parameters.InjectedPackages }}"
           exit $LASTEXITCODE;
         env: ${{ parameters.EnvVars }}
         displayName: Run Tests

From a6c4a6f7eb54ad1e19a52f974128a1f43a51c8c6 Mon Sep 17 00:00:00 2001
From: Scott Beddall
Date: Fri, 21 Nov 2025 00:32:28 +0000
Subject: [PATCH 09/76] nicely format the printed results so that we can easily
 examine the output for a specific package

---
 eng/scripts/dispatch_checks.py              |  6 ++++++
 scripts/devops_tasks/set_tox_environment.py | 10 +++++-----
 2 files changed, 11 insertions(+), 5 deletions(-)

diff --git a/eng/scripts/dispatch_checks.py b/eng/scripts/dispatch_checks.py
index 498e7194cc9c..756f4655a2b9 100644
--- a/eng/scripts/dispatch_checks.py
+++ b/eng/scripts/dispatch_checks.py
@@ -77,6 +77,9 @@ async def run_check(
     # Print captured output after completion to avoid interleaving
     header = f"===== OUTPUT: {check} :: {package} (exit {exit_code}) ====="
     trailer = "=" * len(header)
+    if in_ci():
+        print(f"##[group]{package} :: {check}")
+
     if stdout:
         print(header)
         print(stdout.rstrip())
@@ -86,6 +89,9 @@ async def run_check(
         print(stderr.rstrip())
         print(trailer)
 
+    if in_ci():
+        print("##[endgroup]")
+
     # if we have any output collections to complete, do so now here
 
     # finally, we need to clean up any temp dirs created by --isolate
diff --git a/scripts/devops_tasks/set_tox_environment.py b/scripts/devops_tasks/set_tox_environment.py
index 353ae4220308..1e8f8f0870cc 100644
--- a/scripts/devops_tasks/set_tox_environment.py
+++ b/scripts/devops_tasks/set_tox_environment.py
@@ -14,11 +14,11 @@
 
 FULL_BUILD_SET = [
     "whl",
-    "sdist",
-    "depends",
-    "latestdependency",
-    "mindependency",
-    "whl_no_aio",
+    # "sdist",
+    # "depends",
+    # "latestdependency",
+    # "mindependency",
+    # "whl_no_aio",
 ]
 # this branch is checking only whl for now. rest will follow as they migrate
 PR_BUILD_SET = ["whl"] #, "sdist", "mindependency"

From 3c30e0637ff70f92f4d015c6be82ab8c2f808a8f Mon Sep 17 00:00:00 2001
From: Scott Beddall
Date: Fri, 21 Nov 2025 02:30:07 +0000
Subject: [PATCH 10/76] we actually fail when pytest fails

---
 eng/tools/azure-sdk-tools/azpysdk/whl.py | 7 ++++---
 1 file changed, 4 insertions(+), 3 deletions(-)

diff --git a/eng/tools/azure-sdk-tools/azpysdk/whl.py b/eng/tools/azure-sdk-tools/azpysdk/whl.py
index fbd7850f8e5c..f66e4beb6e64 100644
--- a/eng/tools/azure-sdk-tools/azpysdk/whl.py
+++ b/eng/tools/azure-sdk-tools/azpysdk/whl.py
@@ -101,9 +101,10 @@ def run(self, args: argparse.Namespace) -> int:
                     )
                     # Align with tox: skip coverage when tests are skipped entirely
                     continue
-
-                logger.error(f"pytest failed for {package_name} with exit code {pytest_result.returncode}.")
-                continue
+                else:
+                    overall_result = max(overall_result, pytest_result.returncode)
+                    logger.error(f"pytest failed for {package_name} with exit code {pytest_result.returncode}.")
+                    continue
 
             coverage_command = [

From 435850299990da23e674d13f04da0d3d1a4bdcec Mon Sep 17 00:00:00 2001
From: Scott Beddall
Date: Sat, 22 Nov 2025 02:22:07 +0000
Subject: [PATCH 11/76] failures will actually fail this thing now

---
 eng/tools/azure-sdk-tools/azpysdk/whl.py | 14 ++++++++------
 1 file changed, 8 insertions(+), 6 deletions(-)

diff --git a/eng/tools/azure-sdk-tools/azpysdk/whl.py b/eng/tools/azure-sdk-tools/azpysdk/whl.py
index f66e4beb6e64..6a692ac9c3be 100644
--- a/eng/tools/azure-sdk-tools/azpysdk/whl.py
+++ b/eng/tools/azure-sdk-tools/azpysdk/whl.py
@@ -55,7 +55,7 @@ def run(self, args: argparse.Namespace) -> int:
             logger.warning("No target packages discovered for whl check.")
             return 0
 
-        overall_result = 0
+        results = []
 
         for parsed in targeted:
             package_dir = parsed.folder
@@ -69,7 +69,7 @@ def run(self, args: argparse.Namespace) -> int:
                 self.install_dev_reqs(executable, args, package_dir)
             except CalledProcessError as exc:
                 logger.error(f"Failed to install dependencies for {package_name}: {exc}")
-                overall_result = max(overall_result, exc.returncode or 1)
+                results.append(exc.returncode)
                 continue
 
             try:
@@ -86,7 +86,7 @@ def run(self, args: argparse.Namespace) -> int:
                 )
             except CalledProcessError as exc:
                 logger.error(f"Failed to build/install wheel for {package_name}: {exc}")
-                overall_result = max(overall_result, exc.returncode or 1)
+                results.append(1)
                 continue
 
             pytest_args = self._build_pytest_args(package_dir, args)
@@ -102,7 +102,7 @@ def run(self, args: argparse.Namespace) -> int:
                 # Align with tox: skip coverage when tests are skipped entirely
                 continue
             else:
-                overall_result = max(overall_result, pytest_result.returncode)
+                results.append(pytest_result.returncode)
                 logger.error(f"pytest failed for {package_name} with exit code {pytest_result.returncode}.")
                 continue
 
@@ -122,9 +122,11 @@ def run(self, args: argparse.Namespace) -> int:
                 logger.error(coverage_result.stdout)
             if coverage_result.stderr:
                 logger.error(coverage_result.stderr)
-            overall_result = max(overall_result, coverage_result.returncode)
+            results.append(coverage_result.returncode)
 
-        return overall_result
+        results.append(0)
+
+        return max(results)

From 6da6464e37f0ee043dae30f9f5b46c4caec17f87 Mon Sep 17 00:00:00 2001
From: Scott Beddall
Date: Sat, 22 Nov 2025 02:59:30 +0000
Subject: [PATCH 12/76] how is this not failing the entire invocation? I don't
 get it

---
 eng/tools/azure-sdk-tools/azpysdk/whl.py | 4 +---
 1 file changed, 1 insertion(+), 3 deletions(-)

diff --git a/eng/tools/azure-sdk-tools/azpysdk/whl.py b/eng/tools/azure-sdk-tools/azpysdk/whl.py
index 6a692ac9c3be..f8e10c3b6d2d 100644
--- a/eng/tools/azure-sdk-tools/azpysdk/whl.py
+++ b/eng/tools/azure-sdk-tools/azpysdk/whl.py
@@ -124,9 +124,7 @@ def run(self, args: argparse.Namespace) -> int:
                 logger.error(coverage_result.stderr)
             results.append(coverage_result.returncode)
 
-        results.append(0)
-
-        return max(results)
+        return max(results) if results else 0

From 670aa5c8e1274832bd44044b8e0905fdfeeb29b5 Mon Sep 17 00:00:00 2001
From: jennypng <63012604+JennyPng@users.noreply.github.com>
Date: Mon, 24 Nov 2025 16:53:25 -0800
Subject: [PATCH 13/76] add pytest exit code

---
 eng/tools/azure-sdk-tools/azpysdk/whl.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/eng/tools/azure-sdk-tools/azpysdk/whl.py b/eng/tools/azure-sdk-tools/azpysdk/whl.py
index fbd7850f8e5c..15a8359ec251 100644
--- a/eng/tools/azure-sdk-tools/azpysdk/whl.py
+++ b/eng/tools/azure-sdk-tools/azpysdk/whl.py
@@ -101,8 +101,8 @@ def run(self, args: argparse.Namespace) -> int:
                     )
                     # Align with tox: skip coverage when tests are skipped entirely
                     continue
-
                 logger.error(f"pytest failed for {package_name} with exit code {pytest_result.returncode}.")
+                overall_result = max(overall_result, pytest_result.returncode or 1)
                 continue
 
             coverage_command = [

From 6090b956576b328b328861377b0c206fca68fbc1 Mon Sep 17 00:00:00 2001
From: Scott Beddall
Date: Thu, 4 Dec 2025 02:23:01 +0000
Subject: [PATCH 14/76] patch up the test invocation

---
 eng/scripts/dispatch_checks.py           | 12 ++++++------
 eng/tools/azure-sdk-tools/azpysdk/whl.py |  3 ++-
 2 files changed, 8 insertions(+), 7 deletions(-)

diff --git a/eng/scripts/dispatch_checks.py b/eng/scripts/dispatch_checks.py
index 756f4655a2b9..85c9137eeb7f 100644
--- a/eng/scripts/dispatch_checks.py
+++ b/eng/scripts/dispatch_checks.py
@@ -95,12 +95,12 @@ async def run_check(
     # if we have any output collections to complete, do so now here
 
     # finally, we need to clean up any temp dirs created by --isolate
-    if in_ci():
-        isolate_dir = os.path.join(package, f".venv_{check}")
-        try:
-            shutil.rmtree(isolate_dir)
-        except:
-            logger.warning(f"Failed to remove isolate dir {isolate_dir} for {package} / {check}")
+    # if in_ci():
+    #     isolate_dir = os.path.join(package, f".venv_{check}")
+    #     try:
+    #         shutil.rmtree(isolate_dir)
+    #     except:
+    #         logger.warning(f"Failed to remove isolate dir {isolate_dir} for {package} / {check}")
 
     return CheckResult(package, check, exit_code, duration, stdout, stderr)
 
diff --git a/eng/tools/azure-sdk-tools/azpysdk/whl.py b/eng/tools/azure-sdk-tools/azpysdk/whl.py
index f8e10c3b6d2d..1ebf91753a90 100644
--- a/eng/tools/azure-sdk-tools/azpysdk/whl.py
+++ b/eng/tools/azure-sdk-tools/azpysdk/whl.py
@@ -91,7 +91,7 @@ def run(self, args: argparse.Namespace) -> int:
 
             pytest_args = self._build_pytest_args(package_dir, args)
             pytest_command = ["-m", "pytest", *pytest_args]
-            pytest_result = self.run_venv_command(executable, pytest_command, cwd=package_dir, immediately_dump=True)
+            pytest_result = self.run_venv_command(executable, pytest_command, cwd=staging_directory, immediately_dump=True)
 
             if pytest_result.returncode != 0:
                 if pytest_result.returncode == 5 and is_error_code_5_allowed(package_dir, package_name):
@@ -139,6 +139,7 @@ def _build_pytest_args(self, package_dir: str, args: argparse.Namespace) -> List
         junit_path = os.path.join(package_dir, f"test-junit-{args.command}.xml")
 
         default_args = [
+            f"{package_dir}",
             "-rsfE",
             f"--junitxml={junit_path}",
             "--verbose",

From aacd30cecf605df19aefacfded784fbb7a907afe Mon Sep 17 00:00:00 2001
From: Scott Beddall
Date: Thu, 4 Dec 2025 20:03:12 +0000
Subject: [PATCH 15/76] manual migration of whl/sdist/whl_no_aio. copilot
 mindependency but I don't trust it yet

---
 eng/scripts/dispatch_checks.py                |  12 +-
 eng/test_tools.txt                            |   4 +-
 eng/tools/azure-sdk-tools/azpysdk/Check.py    |  11 +
 .../azpysdk/dependency_check.py               | 207 ++++++++++++++++++
 .../azpysdk/install_and_test.py               | 195 +++++++++++++++++
 .../azpysdk/latestdependency.py               |  27 +++
 eng/tools/azure-sdk-tools/azpysdk/main.py     |   8 +
 .../azure-sdk-tools/azpysdk/mindependency.py  |  32 +++
 eng/tools/azure-sdk-tools/azpysdk/sdist.py    |  27 +++
 eng/tools/azure-sdk-tools/azpysdk/whl.py      | 157 ++-----------
 .../azure-sdk-tools/azpysdk/whl_no_aio.py     |  44 ++++
 11 files changed, 572 insertions(+), 152 deletions(-)
 create mode 100644 eng/tools/azure-sdk-tools/azpysdk/dependency_check.py
 create mode 100644 eng/tools/azure-sdk-tools/azpysdk/install_and_test.py
 create mode 100644 eng/tools/azure-sdk-tools/azpysdk/latestdependency.py
 create mode 100644 eng/tools/azure-sdk-tools/azpysdk/mindependency.py
 create mode 100644 eng/tools/azure-sdk-tools/azpysdk/sdist.py
 create mode 100644 eng/tools/azure-sdk-tools/azpysdk/whl_no_aio.py

diff --git a/eng/scripts/dispatch_checks.py b/eng/scripts/dispatch_checks.py
index 85c9137eeb7f..756f4655a2b9 100644
--- a/eng/scripts/dispatch_checks.py
+++ b/eng/scripts/dispatch_checks.py
@@ -95,12 +95,12 @@ async def run_check(
     # if we have any output collections to complete, do so now here
 
     # finally, we need to clean up any temp dirs created by --isolate
-    # if in_ci():
-    #     isolate_dir = os.path.join(package, f".venv_{check}")
-    #     try:
-    #         shutil.rmtree(isolate_dir)
-    #     except:
-    #         logger.warning(f"Failed to remove isolate dir {isolate_dir} for {package} / {check}")
+    if in_ci():
+        isolate_dir = os.path.join(package, f".venv_{check}")
+        try:
+            shutil.rmtree(isolate_dir)
+        except:
+            logger.warning(f"Failed to remove isolate dir {isolate_dir} for {package} / {check}")
 
     return CheckResult(package, check, exit_code, duration, stdout, stderr)
 
diff --git a/eng/test_tools.txt b/eng/test_tools.txt
index 2bfe6a1bc62e..979a374ad722 100644
--- a/eng/test_tools.txt
+++ b/eng/test_tools.txt
@@ -1,4 +1,4 @@
-# requirements leveraged by ci for testing
+
 pytest==8.3.5
 pytest-asyncio==0.24.0
 pytest-cov==5.0.0
@@ -10,7 +10,7 @@
 stevedore==5.4.1
 pyproject-api==1.8.0
 build==1.2.2.post1
 
-# locking packages defined as deps from azure-sdk-tools
+
 Jinja2==3.1.6
 json-delta==2.0.2
 readme_renderer==43.0
diff --git a/eng/tools/azure-sdk-tools/azpysdk/Check.py b/eng/tools/azure-sdk-tools/azpysdk/Check.py
index 212e089b82d0..1726b25b93ee 100644
--- a/eng/tools/azure-sdk-tools/azpysdk/Check.py
+++ b/eng/tools/azure-sdk-tools/azpysdk/Check.py
@@ -27,6 +27,17 @@
 # being called from within a site-packages folder. Due to that, we can't trust the location of __file__
 REPO_ROOT = discover_repo_root()
 
+PACKAGING_REQUIREMENTS = [
+    "wheel==0.45.1",
+    "packaging==24.2",
+    "urllib3==2.2.3",
+    "tomli==2.2.1",
+    "build==1.2.2.post1",
+    "pkginfo==1.12.1.2",
+]
+
+TEST_TOOLS_REQUIREMENTS = os.path.join(REPO_ROOT, "eng/test_tools.txt")
+DEPENDENCY_TOOLS_REQUIREMENTS = os.path.join(REPO_ROOT, "eng/dependency_tools.txt")
 
 class Check(abc.ABC):
diff --git a/eng/tools/azure-sdk-tools/azpysdk/dependency_check.py b/eng/tools/azure-sdk-tools/azpysdk/dependency_check.py
new file mode 100644
index 000000000000..5732571804a8
--- /dev/null
+++ b/eng/tools/azure-sdk-tools/azpysdk/dependency_check.py
@@ -0,0 +1,207 @@
+import argparse
+import os
+import sys
+from subprocess import CalledProcessError
+from typing import Dict, List, Optional
+
+from .Check import Check, DEPENDENCY_TOOLS_REQUIREMENTS, PACKAGING_REQUIREMENTS, TEST_TOOLS_REQUIREMENTS
+
+from ci_tools.functions import install_into_venv, is_error_code_5_allowed
+from ci_tools.scenario.generation import create_package_and_install
+from ci_tools.variables import discover_repo_root, set_envvar_defaults
+from ci_tools.logging import logger
+
+REPO_ROOT = discover_repo_root()
+
+
+class DependencyCheck(Check):
+    """Shared implementation for dependency bound test environments."""
+
+    def __init__(
+        self,
+        *,
+        dependency_type: str,
+        proxy_url: Optional[str],
+        display_name: str,
+        additional_packages: Optional[List[str]] = None,
+    ) -> None:
+        super().__init__()
+        self.dependency_type = dependency_type
+        self.proxy_url = proxy_url
+        self.display_name = display_name
+        self.additional_packages = list(additional_packages or [])
+
+    def register(
+        self, subparsers: "argparse._SubParsersAction", parent_parsers: Optional[List[argparse.ArgumentParser]] = None
+    ) -> None:
+        raise NotImplementedError
+
+    def run(self, args: argparse.Namespace) -> int:
+        logger.info(f"Running {self.display_name} check...")
+
+        env_defaults = self.get_env_defaults()
+        if env_defaults:
+            set_envvar_defaults(env_defaults)
+
+        targeted = self.get_targeted_directories(args)
+        if not targeted:
+            logger.warning(f"No target packages discovered for {self.display_name} check.")
+            return 0
+
+        results: List[int] = []
+
+        for parsed in targeted:
+            package_dir = parsed.folder
+            package_name = parsed.name
+
+            executable, staging_directory = self.get_executable(args.isolate, args.command, sys.executable, package_dir)
+            logger.info(f"Processing {package_name} using interpreter {executable}")
+
+            try:
+                self._install_dependency_requirements(executable, package_dir)
+            except CalledProcessError as exc:
+                logger.error(f"Failed to install base dependencies for {package_name}: {exc}")
+                results.append(exc.returncode)
+                continue
+
+            install_script = os.path.join(REPO_ROOT, "eng/tox/install_depend_packages.py")
+            install_command = [
+                install_script,
+                "-t",
+                package_dir,
+                "-d",
+                self.dependency_type,
+                "-w",
+                staging_directory,
+            ]
+            install_result = self.run_venv_command(
+                executable,
+                install_command,
+                cwd=package_dir,
+                immediately_dump=True,
+            )
+
+            if install_result.returncode != 0:
+                logger.error(
+                    f"install_depend_packages.py failed for {package_name} with exit code {install_result.returncode}."
+                )
+                results.append(install_result.returncode)
+                continue
+
+            try:
+                create_package_and_install(
+                    distribution_directory=staging_directory,
+                    target_setup=package_dir,
+                    skip_install=False,
+                    cache_dir=None,
+                    work_dir=staging_directory,
+                    force_create=False,
+                    package_type="wheel",
+                    pre_download_disabled=True,
+                    python_executable=executable,
+                )
+            except CalledProcessError as exc:
+                logger.error(f"Failed to build/install wheel for {package_name}: {exc}")
+                results.append(1)
+                continue
+
+            self.pip_freeze(executable)
+
+            if not self._verify_installed_packages(executable, package_dir, staging_directory):
+                results.append(1)
+                continue
+
+            pytest_args = self._build_pytest_args(package_dir, args)
+            pytest_command = ["-m", "pytest", *pytest_args]
+            pytest_result = self.run_venv_command(
+                executable,
+                pytest_command,
+                cwd=staging_directory,
+                immediately_dump=True,
+            )
+
+            if pytest_result.returncode != 0:
+                if pytest_result.returncode == 5 and is_error_code_5_allowed(package_dir, package_name):
+                    logger.info(
+                        "pytest exited with code 5 for %s, which is allowed for management or opt-out packages.",
+                        package_name,
+                    )
+                    continue
+                logger.error(
+                    f"pytest failed for {package_name} with exit code {pytest_result.returncode}."
+                )
+                results.append(pytest_result.returncode)
+
+        return max(results) if results else 0
+
+    def get_env_defaults(self) -> Dict[str, str]:
+        defaults: Dict[str, str] = {"DEPENDENCY_TYPE": self.dependency_type}
+        if self.proxy_url:
+            defaults["PROXY_URL"] = self.proxy_url
+        return defaults
+
+    def _install_dependency_requirements(self, executable: str, package_dir: str) -> None:
+        install_into_venv(executable, PACKAGING_REQUIREMENTS, package_dir)
+
+        if os.path.exists(DEPENDENCY_TOOLS_REQUIREMENTS):
+            install_into_venv(executable, ["-r", DEPENDENCY_TOOLS_REQUIREMENTS], package_dir)
+        else:
+            logger.warning(f"Dependency tools requirements file not found at {DEPENDENCY_TOOLS_REQUIREMENTS}.")
+
+        if os.path.exists(TEST_TOOLS_REQUIREMENTS):
+            install_into_venv(executable, ["-r", TEST_TOOLS_REQUIREMENTS], package_dir)
+        else:
+            logger.warning(f"Test tools requirements file not found at {TEST_TOOLS_REQUIREMENTS}.")
+
+        if self.additional_packages:
+            install_into_venv(executable, self.additional_packages, package_dir)
+
+    def _verify_installed_packages(self, executable: str, package_dir: str, staging_directory: str) -> bool:
+        packages_file = os.path.join(staging_directory, "packages.txt")
+        if not os.path.exists(packages_file):
+            logger.error(f"Expected packages.txt not found at {packages_file} for {package_dir}.")
+            return False
+
+        verify_script = os.path.join(REPO_ROOT, "eng/tox/verify_installed_packages.py")
+        verify_command = [verify_script, "--packages-file", packages_file]
+        verify_result = self.run_venv_command(executable, verify_command, cwd=package_dir)
+
+        if verify_result.returncode != 0:
+            logger.error(f"verify_installed_packages failed for {package_dir} (exit code {verify_result.returncode}).")
+            if verify_result.stdout:
+                logger.error(verify_result.stdout)
+            if verify_result.stderr:
+                logger.error(verify_result.stderr)
+            return False
+
+        return True
+
+    def _build_pytest_args(self, package_dir: str, args: argparse.Namespace) -> List[str]:
+        log_level = os.getenv("PYTEST_LOG_LEVEL", "51")
+        junit_path = os.path.join(package_dir, f"test-junit-{args.command}.xml")
+
+        default_args = [
+            f"{package_dir}",
+            "-rsfE",
+            f"--junitxml={junit_path}",
+            "--verbose",
+            "--cov-branch",
+            "--durations=10",
+            "--ignore=azure",
+            "--ignore=.tox",
+            "--ignore-glob=.venv*",
+            "--ignore=build",
+            "--ignore=.eggs",
+            "--ignore=samples",
+            f"--log-cli-level={log_level}",
+            "--no-cov",
+        ]
+
+        pytest_args = list(default_args)
+
+        if getattr(args, "pytest_args", None):
+            pytest_args.extend(args.pytest_args)
+
+        pytest_args.append(package_dir)
+
+        return pytest_args
diff --git a/eng/tools/azure-sdk-tools/azpysdk/install_and_test.py b/eng/tools/azure-sdk-tools/azpysdk/install_and_test.py
new file mode 100644
index 000000000000..6d8bd25b96f1
--- /dev/null
+++ b/eng/tools/azure-sdk-tools/azpysdk/install_and_test.py
@@ -0,0 +1,195 @@
+import argparse
+import os
+import sys
+from subprocess import CalledProcessError
+from typing import Dict, List, Optional
+
+from .Check import Check, DEPENDENCY_TOOLS_REQUIREMENTS, PACKAGING_REQUIREMENTS, TEST_TOOLS_REQUIREMENTS
+
+from ci_tools.functions import is_error_code_5_allowed, install_into_venv
+from ci_tools.scenario.generation import create_package_and_install
+from ci_tools.variables import discover_repo_root, set_envvar_defaults
+from ci_tools.logging import logger
+
+REPO_ROOT = discover_repo_root()
+
+class InstallAndTest(Check):
+    """Shared implementation for build-and-test style checks."""
+
+    def __init__(
+        self,
+        *,
+        package_type: str,
+        proxy_url: Optional[str],
+        display_name: str,
+        additional_pytest_args: Optional[List[str]] = None,
+        coverage_enabled: bool = True,
+    ) -> None:
+        super().__init__()
+        self.package_type = package_type
+        self.proxy_url = proxy_url
+        self.display_name = display_name
+        self.additional_pytest_args = list(additional_pytest_args or [])
+        self.coverage_enabled = coverage_enabled
+
+    def register(
+        self, subparsers: "argparse._SubParsersAction", parent_parsers: Optional[List[argparse.ArgumentParser]] = None
+    ) -> None:
+        raise NotImplementedError
+
+    def run(self, args: argparse.Namespace) -> int:
+        logger.info(f"Running {self.display_name} check...")
+
+        env_defaults = self.get_env_defaults()
+        if env_defaults:
+            set_envvar_defaults(env_defaults)
+
+        targeted = self.get_targeted_directories(args)
+        if not targeted:
+            logger.warning(f"No target packages discovered for {self.display_name} check.")
+            return 0
+
+        results: List[int] = []
+
+        for parsed in targeted:
+            package_dir = parsed.folder
+            package_name = parsed.name
+
+            executable, staging_directory = self.get_executable(args.isolate, args.command, sys.executable, package_dir)
+            logger.info(f"Processing {package_name} using interpreter {executable}")
+
+            try:
+                self._install_common_requirements(executable, package_dir)
+                if self.should_install_dev_requirements():
+                    self.install_dev_reqs(executable, args, package_dir)
+                self.after_dependencies_installed(executable, package_dir, staging_directory, args)
+            except CalledProcessError as exc:
+                logger.error(f"Failed to prepare dependencies for {package_name}: {exc}")
+                results.append(exc.returncode)
+                continue
+
+            try:
+                create_package_and_install(
+                    distribution_directory=staging_directory,
+                    target_setup=package_dir,
+                    skip_install=False,
+                    cache_dir=None,
+                    work_dir=staging_directory,
+                    force_create=False,
+                    package_type=self.package_type,
+                    pre_download_disabled=False,
+                    python_executable=executable,
+                )
+            except CalledProcessError as exc:
+                logger.error(f"Failed to build/install {self.package_type} for {package_name}: {exc}")
+                results.append(1)
+                continue
+
+            try:
+                self.before_pytest(executable, package_dir, staging_directory, args)
+            except CalledProcessError as exc:
+                logger.error(f"Pre-pytest hook failed for {package_name}: {exc}")
+                results.append(exc.returncode or 1)
+                continue
+
+            pytest_args = self._build_pytest_args(package_dir, args)
+            pytest_command = ["-m", "pytest", *pytest_args]
+            pytest_result = self.run_venv_command(
+                executable, pytest_command, cwd=staging_directory, immediately_dump=True
+            )
+
+            if pytest_result.returncode != 0:
+                if pytest_result.returncode == 5 and is_error_code_5_allowed(package_dir, package_name):
+                    logger.info(
+                        "pytest exited with code 5 for %s, which is allowed for management or opt-out packages.",
+                        package_name,
+                    )
+                    # Align with tox: skip coverage when tests are skipped entirely
+                    continue
+                else:
+                    results.append(pytest_result.returncode)
+                    logger.error(
+                        f"pytest failed for {package_name} with exit code {pytest_result.returncode}."
+                    )
+                    continue
+
+            if not self.coverage_enabled:
+                continue
+
+            coverage_command = [
+                os.path.join(REPO_ROOT, "eng/tox/run_coverage.py"),
+                "-t",
+                package_dir,
+                "-r",
+                REPO_ROOT,
+            ]
+            coverage_result = self.run_venv_command(executable, coverage_command, cwd=package_dir)
+            if coverage_result.returncode != 0:
+                logger.error(
+                    f"Coverage generation failed for {package_name} with exit code {coverage_result.returncode}."
+                )
+                if coverage_result.stdout:
+                    logger.error(coverage_result.stdout)
+                if coverage_result.stderr:
+                    logger.error(coverage_result.stderr)
+                results.append(coverage_result.returncode)
+
+        return max(results) if results else 0
+
+    def get_env_defaults(self) -> Dict[str, str]:
+        defaults: Dict[str, str] = {}
+        if self.proxy_url:
+            defaults["PROXY_URL"] = self.proxy_url
+        return defaults
+
+    def should_install_dev_requirements(self) -> bool:
+        return True
+
+    def after_dependencies_installed(
+        self, executable: str, package_dir: str, staging_directory: str, args: argparse.Namespace
+    ) -> None:
+        del executable, package_dir, staging_directory, args
+        return None
+
+    def before_pytest(
+        self, executable: str, package_dir: str, staging_directory: str, args: argparse.Namespace
+    ) -> None:
+        del executable, package_dir, staging_directory, args
+        return None
+
+    def _install_common_requirements(self, executable: str, package_dir: str) -> None:
+        install_into_venv(executable, PACKAGING_REQUIREMENTS, package_dir)
+
+        if os.path.exists(TEST_TOOLS_REQUIREMENTS):
+            install_into_venv(executable, ["-r", TEST_TOOLS_REQUIREMENTS], package_dir)
+        else:
+            logger.warning(f"Test tools requirements file not found at {TEST_TOOLS_REQUIREMENTS}.")
+
+    def _build_pytest_args(self, package_dir: str, args: argparse.Namespace) -> List[str]:
+        log_level = os.getenv("PYTEST_LOG_LEVEL", "51")
+        junit_path = os.path.join(package_dir, f"test-junit-{args.command}.xml")
+
+        default_args = [
+            f"{package_dir}",
+            "-rsfE",
+            f"--junitxml={junit_path}",
+            "--verbose",
+            "--cov-branch",
+            "--durations=10",
+            "--ignore=azure",
+            "--ignore=.tox",
+            "--ignore-glob=.venv*",
+            "--ignore=build",
+            "--ignore=.eggs",
+            "--ignore=samples",
+            f"--log-cli-level={log_level}",
+        ]
+
+        pytest_args = [*default_args, *self.additional_pytest_args]
+
+        if getattr(args, "pytest_args", None):
+            pytest_args.extend(args.pytest_args)
+
+        pytest_args.append(package_dir)
+
+        return pytest_args
diff --git a/eng/tools/azure-sdk-tools/azpysdk/latestdependency.py b/eng/tools/azure-sdk-tools/azpysdk/latestdependency.py
new file mode 100644
index 000000000000..65d22c2efa0a
--- /dev/null
+++ b/eng/tools/azure-sdk-tools/azpysdk/latestdependency.py
@@ -0,0 +1,27 @@
+import argparse
+from typing import List, Optional
+
+from .dependency_check import DependencyCheck
+
+
+class latestdependency(DependencyCheck):
+    def __init__(self) -> None:
+        super().__init__(
+            dependency_type="Latest",
+            proxy_url="http://localhost:5012",
+            display_name="latestdependency",
+        )
+
+    def register(
+        self, subparsers: "argparse._SubParsersAction", parent_parsers: Optional[List[argparse.ArgumentParser]] = None
+    ) -> None:
+        """Register the `latestdependency` check."""
+
+        parents = parent_parsers or []
+        parser = subparsers.add_parser("latestdependency", parents=parents, help="Run the latestdependency check")
+        parser.set_defaults(func=self.run)
+        parser.add_argument(
+            "--pytest-args",
+            nargs=argparse.REMAINDER,
+            help="Additional arguments forwarded to pytest.",
+        )
diff --git a/eng/tools/azure-sdk-tools/azpysdk/main.py b/eng/tools/azure-sdk-tools/azpysdk/main.py
index 34626133df25..b7f5ca99a771 100644
--- a/eng/tools/azure-sdk-tools/azpysdk/main.py
+++ b/eng/tools/azure-sdk-tools/azpysdk/main.py
@@ -26,11 +26,15 @@
 from .verifytypes import verifytypes
 from .verify_sdist import verify_sdist
 from .whl import whl
+from .sdist import sdist
+from .whl_no_aio import whl_no_aio
 from .verify_whl import verify_whl
 from .bandit import bandit
 from .verify_keywords import verify_keywords
 from .generate import generate
 from .breaking import breaking
+from .mindependency import mindependency
+from .latestdependency import latestdependency
 
 from ci_tools.logging import configure_logging, logger
 
@@ -88,11 +92,15 @@ def build_parser() -> argparse.ArgumentParser:
     verifytypes().register(subparsers, [common])
     verify_sdist().register(subparsers, [common])
     whl().register(subparsers, [common])
+    sdist().register(subparsers, [common])
+    whl_no_aio().register(subparsers, [common])
     verify_whl().register(subparsers, [common])
     bandit().register(subparsers, [common])
     verify_keywords().register(subparsers, [common])
     generate().register(subparsers, [common])
     breaking().register(subparsers, [common])
+    mindependency().register(subparsers, [common])
+    latestdependency().register(subparsers, [common])
 
     return parser
diff --git a/eng/tools/azure-sdk-tools/azpysdk/mindependency.py b/eng/tools/azure-sdk-tools/azpysdk/mindependency.py
new file mode 100644
index 000000000000..994ac200334e
--- /dev/null
+++ b/eng/tools/azure-sdk-tools/azpysdk/mindependency.py
@@ -0,0 +1,32 @@
+import argparse
+from typing import List, Optional
+
+from .dependency_check import DependencyCheck
+
+
+class mindependency(DependencyCheck):
+    def __init__(self) -> None:
+        super().__init__(
+            dependency_type="Minimum",
+            proxy_url="http://localhost:5013",
+            display_name="mindependency",
+            additional_packages=[
+                "azure-mgmt-keyvault<7.0.0",
+                "azure-mgmt-resource<15.0.0",
+                "azure-mgmt-storage<15.0.0",
+            ],
+        )
+
+    def register(
+        self, subparsers: "argparse._SubParsersAction", parent_parsers: Optional[List[argparse.ArgumentParser]] = None
+    ) -> None:
+        """Register the `mindependency` check."""
+
+        parents = parent_parsers or []
+        parser = subparsers.add_parser("mindependency", parents=parents, help="Run the mindependency check")
+        parser.set_defaults(func=self.run)
+        parser.add_argument(
+            "--pytest-args",
+            nargs=argparse.REMAINDER,
+            help="Additional arguments forwarded to pytest.",
+        )
diff --git a/eng/tools/azure-sdk-tools/azpysdk/sdist.py b/eng/tools/azure-sdk-tools/azpysdk/sdist.py
new file mode 100644
index 000000000000..761473ba1f80
--- /dev/null
+++ b/eng/tools/azure-sdk-tools/azpysdk/sdist.py
@@ -0,0 +1,27 @@
+import argparse
+from typing import List, Optional
+
+from .install_and_test import InstallAndTest
+
+
+class sdist(InstallAndTest):
+    def __init__(self) -> None:
+        super().__init__(
+            package_type="sdist",
+            proxy_url="http://localhost:5005",
+            display_name="sdist",
+        )
+
+    def register(
+        self, subparsers: "argparse._SubParsersAction", parent_parsers: Optional[List[argparse.ArgumentParser]] = None
+    ) -> None:
+        """Register the `sdist` check. This builds and installs the source distribution before running pytest."""
+
+        parents = parent_parsers or []
+        parser = subparsers.add_parser("sdist", parents=parents, help="Run the sdist check")
+        parser.set_defaults(func=self.run)
+        parser.add_argument(
+            "--pytest-args",
+            nargs=argparse.REMAINDER,
+            help="Additional arguments forwarded to pytest.",
+        )
diff --git a/eng/tools/azure-sdk-tools/azpysdk/whl.py b/eng/tools/azure-sdk-tools/azpysdk/whl.py
index 1ebf91753a90..2eed20071f4a 100644
--- a/eng/tools/azure-sdk-tools/azpysdk/whl.py
+++ b/eng/tools/azure-sdk-tools/azpysdk/whl.py
@@ -1,159 +1,28 @@
 import argparse
-import os
-import sys
-from subprocess import CalledProcessError
 from typing import List, Optional
 
-from .Check import Check
+from .install_and_test import InstallAndTest
 
-from ci_tools.functions import is_error_code_5_allowed, install_into_venv
-from ci_tools.scenario.generation import create_package_and_install
-from ci_tools.variables import discover_repo_root, set_envvar_defaults
-from ci_tools.logging import logger
 
-REPO_ROOT = discover_repo_root()
-
-PACKAGING_REQUIREMENTS = [
-    "wheel==0.45.1",
-    "packaging==24.2",
-    "urllib3==2.2.3",
-    "tomli==2.2.1",
-    "build==1.2.2.post1",
-    "pkginfo==1.12.1.2",
-]
-
-TEST_TOOLS_REQUIREMENTS = os.path.join(REPO_ROOT, "eng/test_tools.txt")
-
-
-class whl(Check):
+class whl(InstallAndTest):
     def __init__(self) -> None:
-        super().__init__()
+        super().__init__(
+            package_type="wheel",
+            proxy_url="http://localhost:5001",
+            display_name="whl",
+        )
 
     def register(
         self, subparsers: "argparse._SubParsersAction", parent_parsers: Optional[List[argparse.ArgumentParser]] = None
    ) -> None:
-        """Register the `whl` check. The `whl` check installs the wheel version of the target package + its dev_requirements.txt,
-        then invokes pytest. Failures indicate a test issue.
-        """
+        """Register the `whl` check. The `whl` check installs the wheel version of the target package + its
+        dev requirements, then invokes pytest. Failures indicate a test issue."""
+
         parents = parent_parsers or []
-        p = subparsers.add_parser("whl", parents=parents, help="Run the whl check")
-        p.set_defaults(func=self.run)
-        p.add_argument(
+        parser = subparsers.add_parser("whl", parents=parents, help="Run the whl check")
+        parser.set_defaults(func=self.run)
+        parser.add_argument(
             "--pytest-args",
             nargs=argparse.REMAINDER,
             help="Additional arguments forwarded to pytest.",
         )
- ) - if coverage_result.stdout: - logger.error(coverage_result.stdout) - if coverage_result.stderr: - logger.error(coverage_result.stderr) - results.append(coverage_result.returncode) - - return max(results) if results else 0 - - def _install_common_requirements(self, executable: str, package_dir: str) -> None: - install_into_venv(executable, PACKAGING_REQUIREMENTS, package_dir) - - if os.path.exists(TEST_TOOLS_REQUIREMENTS): - install_into_venv(executable, ["-r", TEST_TOOLS_REQUIREMENTS], package_dir) - else: - logger.warning(f"Test tools requirements file not found at {TEST_TOOLS_REQUIREMENTS}.") - - def _build_pytest_args(self, package_dir: str, args: argparse.Namespace) -> List[str]: - log_level = os.getenv("PYTEST_LOG_LEVEL", "51") - junit_path = os.path.join(package_dir, f"test-junit-{args.command}.xml") - - default_args = [ - f"{package_dir}", - "-rsfE", - f"--junitxml={junit_path}", - "--verbose", - "--cov-branch", - "--durations=10", - "--ignore=azure", - "--ignore=.tox", - "--ignore-glob=.venv*", - "--ignore=build", - "--ignore=.eggs", - "--ignore=samples", - f"--log-cli-level={log_level}", - ] - - additional = args.pytest_args if args.pytest_args else [] - - return [*default_args, *additional, package_dir] diff --git a/eng/tools/azure-sdk-tools/azpysdk/whl_no_aio.py b/eng/tools/azure-sdk-tools/azpysdk/whl_no_aio.py new file mode 100644 index 000000000000..1c554fc1fd2f --- /dev/null +++ b/eng/tools/azure-sdk-tools/azpysdk/whl_no_aio.py @@ -0,0 +1,44 @@ +import argparse +from typing import List, Optional + +from .install_and_test import InstallAndTest +from ci_tools.logging import logger + + +class whl_no_aio(InstallAndTest): + def __init__(self) -> None: + super().__init__( + package_type="wheel", + proxy_url="http://localhost:5004", + display_name="whl_no_aio", + ) + + def register( + self, subparsers: "argparse._SubParsersAction", parent_parsers: Optional[List[argparse.ArgumentParser]] = None + ) -> None: + """Register the `whl_no_aio` check. Matches the wheel check but ensures aiohttp is absent before pytest.""" + + parents = parent_parsers or [] + parser = subparsers.add_parser("whl_no_aio", parents=parents, help="Run the whl_no_aio check") + parser.set_defaults(func=self.run) + parser.add_argument( + "--pytest-args", + nargs=argparse.REMAINDER, + help="Additional arguments forwarded to pytest.", + ) + + def before_pytest( + self, executable: str, package_dir: str, staging_directory: str, args: argparse.Namespace + ) -> None: + uninstall_cmd = ["-m", "pip", "uninstall", "aiohttp", "--yes"] + result = self.run_venv_command(executable, uninstall_cmd, cwd=package_dir) + if result.returncode != 0: + logger.warning( + "Failed to uninstall aiohttp prior to pytest for %s. 
Exit code %s.", + package_dir, + result.returncode, + ) + if result.stdout: + logger.warning(result.stdout) + if result.stderr: + logger.warning(result.stderr) From f709a23e77d00d2ff2037e437665f720ca67cdf8 Mon Sep 17 00:00:00 2001 From: Scott Beddall Date: Thu, 4 Dec 2025 20:03:53 +0000 Subject: [PATCH 16/76] enable a couple more envs --- scripts/devops_tasks/set_tox_environment.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/scripts/devops_tasks/set_tox_environment.py b/scripts/devops_tasks/set_tox_environment.py index 1e8f8f0870cc..3c4c66b95342 100644 --- a/scripts/devops_tasks/set_tox_environment.py +++ b/scripts/devops_tasks/set_tox_environment.py @@ -14,11 +14,11 @@ FULL_BUILD_SET = [ "whl", - # "sdist", + "sdist", # "depends", # "latestdependency", # "mindependency", - # "whl_no_aio", + "whl_no_aio", ] # this branch is checking only whl for now. rest will follow as they migrate PR_BUILD_SET = ["whl"] #, "sdist", "mindependency" From 177efe09334b7a1b685620f04d8edbf8b743e3b9 Mon Sep 17 00:00:00 2001 From: Scott Beddall Date: Thu, 4 Dec 2025 21:22:13 +0000 Subject: [PATCH 17/76] updates to dependency checking --- .../azpysdk/dependency_check.py | 31 +- .../scenario/dependency_resolution.py | 338 ++++++++++++++++ eng/tox/install_depend_packages.py | 380 +----------------- scripts/devops_tasks/set_tox_environment.py | 6 +- 4 files changed, 359 insertions(+), 396 deletions(-) create mode 100644 eng/tools/azure-sdk-tools/ci_tools/scenario/dependency_resolution.py diff --git a/eng/tools/azure-sdk-tools/azpysdk/dependency_check.py b/eng/tools/azure-sdk-tools/azpysdk/dependency_check.py index 5732571804a8..4157347194f3 100644 --- a/eng/tools/azure-sdk-tools/azpysdk/dependency_check.py +++ b/eng/tools/azure-sdk-tools/azpysdk/dependency_check.py @@ -8,6 +8,7 @@ from ci_tools.functions import install_into_venv, is_error_code_5_allowed from ci_tools.scenario.generation import create_package_and_install +from ci_tools.scenario.dependency_resolution import install_dependent_packages from ci_tools.variables import discover_repo_root, set_envvar_defaults from ci_tools.logging import logger @@ -64,28 +65,16 @@ def run(self, args: argparse.Namespace) -> int: results.append(exc.returncode) continue - install_script = os.path.join(REPO_ROOT, "eng/tox/install_depend_packages.py") - install_command = [ - install_script, - "-t", - package_dir, - "-d", - self.dependency_type, - "-w", - staging_directory, - ] - install_result = self.run_venv_command( - executable, - install_command, - cwd=package_dir, - immediately_dump=True, - ) - - if install_result.returncode != 0: - logger.error( - f"install_depend_packages.py failed for {package_name} with exit code {install_result.returncode}." + try: + install_dependent_packages( + setup_py_file_path=package_dir, + dependency_type=self.dependency_type, + temp_dir=staging_directory, + python_executable=executable, ) - results.append(install_result.returncode) + except Exception as exc: # pragma: no cover - defensive logging + logger.error(f"Dependency resolution failed for {package_name}: {exc}") + results.append(1) continue try: diff --git a/eng/tools/azure-sdk-tools/ci_tools/scenario/dependency_resolution.py b/eng/tools/azure-sdk-tools/ci_tools/scenario/dependency_resolution.py new file mode 100644 index 000000000000..b469f5778a3c --- /dev/null +++ b/eng/tools/azure-sdk-tools/ci_tools/scenario/dependency_resolution.py @@ -0,0 +1,338 @@ +"""Utilities for resolving dependency sets for tox-style checks. 
+ +This module contains the logic previously hosted in ``eng/tox/install_depend_packages.py`` +so that both the legacy tox entry point and the azpysdk checks can share a +single implementation. +""" + +import logging +import os +import re +import subprocess +import sys +from typing import Callable, List, Optional + +from packaging.requirements import Requirement +from packaging.specifiers import SpecifierSet +from packaging.version import Version +from pypi_tools.pypi import PyPIClient + +from ci_tools.functions import ( + compare_python_version, + get_pip_command, + handle_incompatible_minimum_dev_reqs, +) +from ci_tools.parsing import ParsedSetup, parse_require + +logger = logging.getLogger(__name__) + +DEV_REQ_FILE = "dev_requirements.txt" +NEW_DEV_REQ_FILE = "new_dev_requirements.txt" +PKGS_TXT_FILE = "packages.txt" + +# GENERIC_OVERRIDES dictionaries pair a specific dependency with a MINIMUM or MAXIMUM inclusive bound. +# During LATEST and MINIMUM dependency checks, we sometimes need to ignore versions for various compatibility +# reasons. +MINIMUM_VERSION_GENERIC_OVERRIDES = { + "azure-common": "1.1.10", + "msrest": "0.6.10", + "typing-extensions": "4.6.0", + "opentelemetry-api": "1.3.0", + "opentelemetry-sdk": "1.3.0", + "azure-core": "1.11.0", + "requests": "2.19.0", + "six": "1.12.0", + "cryptography": "41.0.0", + "msal": "1.23.0", + "azure-storage-file-datalake": "12.2.0", +} + +MAXIMUM_VERSION_GENERIC_OVERRIDES = {} + +# SPECIFIC OVERRIDES provide additional filtering of upper and lower bound by +# binding an override to the specific package being processed. As an example, when +# processing the latest or minimum deps for "azure-eventhub", the minimum version of "azure-core" +# will be overridden to 1.25.0. +MINIMUM_VERSION_SPECIFIC_OVERRIDES = { + "azure-eventhub": {"azure-core": "1.25.0"}, + "azure-eventhub-checkpointstoreblob-aio": {"azure-core": "1.25.0", "azure-eventhub": "5.11.0"}, + "azure-eventhub-checkpointstoreblob": {"azure-core": "1.25.0", "azure-eventhub": "5.11.0"}, + "azure-eventhub-checkpointstoretable": {"azure-core": "1.25.0", "azure-eventhub": "5.11.0"}, + "azure-identity": {"msal": "1.23.0"}, + "azure-core-tracing-opentelemetry": {"azure-core": "1.28.0"}, + "azure-storage-file-datalake": {"azure-storage-blob": "12.22.0"}, + "azure-cosmos": {"azure-core": "1.30.0"}, + "azure-appconfiguration-provider": {"azure-appconfiguration": "1.7.2"}, + "azure-ai-evaluation": {"aiohttp": "3.8.6"}, +} + +MAXIMUM_VERSION_SPECIFIC_OVERRIDES = {} + +# PLATFORM SPECIFIC OVERRIDES provide additional generic (EG not tied to the package whose dependencies are being processed) +# filtering on a _per platform_ basis. Primarily used to limit certain packages due to platform compatibility. +PLATFORM_SPECIFIC_MINIMUM_OVERRIDES = { + ">=3.14.0": { + "typing-extensions": "4.15.0", + }, + ">=3.12.0": { + "azure-core": "1.23.1", + "aiohttp": "3.9.0", + "six": "1.16.0", + "requests": "2.30.0", + }, + ">=3.13.0": { + "typing-extensions": "4.13.0", + "aiohttp": "3.10.6", + }, +} + +PLATFORM_SPECIFIC_MAXIMUM_OVERRIDES = {} + +# This is used to actively _add_ requirements to the install set. These are used to actively inject +# a new requirement specifier to the set of packages being installed. 
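+# Worked example (derived from the mapping below): if dependency resolution selects
+# azure-core==1.20.0, that version satisfies the "<1.24.0" specifier, so msrest<0.7.0
+# is appended to the install set alongside it.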
+SPECIAL_CASE_OVERRIDES = { + # this package has an override + "azure-core": { + # if the version being installed matches this specifier, add the listed packages to the install list + "<1.24.0": ["msrest<0.7.0"], + } +} + +__all__ = [ + "install_dependent_packages", + "filter_dev_requirements", + "find_released_packages", +] + + +def install_dependent_packages( + setup_py_file_path: str, + dependency_type: str, + temp_dir: str, + python_executable: Optional[str] = None, +) -> None: + """Identify and install the dependency set for a package. + + :param setup_py_file_path: Path to the target package directory. + :param dependency_type: Either ``"Latest"`` or ``"Minimum"``. + :param temp_dir: Directory where temporary artifacts (e.g. filtered requirements, packages.txt) are written. + :param python_executable: Optional interpreter whose environment should receive the installations. Defaults to + the current ``sys.executable``. + """ + + python_exe = python_executable or sys.executable + + released_packages = find_released_packages(setup_py_file_path, dependency_type) + override_added_packages: List[str] = [] + + for pkg_spec in released_packages: + override_added_packages.extend(check_pkg_against_overrides(pkg_spec)) + + logger.info("%s released packages: %s", dependency_type, released_packages) + + additional_filter_fn: Optional[Callable[[str, List[str], List[Requirement]], List[str]]] = None + if dependency_type == "Minimum": + additional_filter_fn = handle_incompatible_minimum_dev_reqs + + dev_req_file_path = filter_dev_requirements( + setup_py_file_path, released_packages, temp_dir, additional_filter_fn + ) + + if override_added_packages: + logger.info("Expanding the requirement set by the packages %s.", override_added_packages) + + install_set = released_packages + list(set(override_added_packages)) + + if install_set or dev_req_file_path: + install_packages(install_set, dev_req_file_path, python_exe) + + if released_packages: + pkgs_file_path = os.path.join(temp_dir, PKGS_TXT_FILE) + with open(pkgs_file_path, "w", encoding="utf-8") as pkgs_file: + for package in released_packages: + pkgs_file.write(package + "\n") + logger.info("Created file %s to track azure packages found on PyPI", pkgs_file_path) + + +def check_pkg_against_overrides(pkg_specifier: str) -> List[str]: + """Apply ``SPECIAL_CASE_OVERRIDES`` for a resolved package specifier.""" + + additional_installs: List[str] = [] + target_package, target_version = pkg_specifier.split("==") + + target_version_obj = Version(target_version) + if target_package in SPECIAL_CASE_OVERRIDES: + for specifier_set, extras in SPECIAL_CASE_OVERRIDES[target_package].items(): + spec = SpecifierSet(specifier_set) + if target_version_obj in spec: + additional_installs.extend(extras) + + return additional_installs + + +def find_released_packages(setup_py_path: str, dependency_type: str) -> List[str]: + """Resolve the appropriate released dependency versions for a package.""" + + pkg_info = ParsedSetup.from_path(setup_py_path) + requires = [r for r in pkg_info.requires if "-nspkg" not in r] + available_packages = [ + spec for spec in map(lambda req: process_requirement(req, dependency_type, pkg_info.name), requires) if spec + ] + return available_packages + + +def process_bounded_versions(originating_pkg_name: str, pkg_name: str, versions: List[str]) -> List[str]: + """Apply generic, platform, and package-specific bounds to the available versions list.""" + + if pkg_name in MINIMUM_VERSION_GENERIC_OVERRIDES: + versions = [ + v for v in versions if 
Version(v) >= Version(MINIMUM_VERSION_GENERIC_OVERRIDES[pkg_name]) + ] + + for platform_bound, restrictions in PLATFORM_SPECIFIC_MINIMUM_OVERRIDES.items(): + if compare_python_version(platform_bound) and pkg_name in restrictions: + versions = [v for v in versions if Version(v) >= Version(restrictions[pkg_name])] + + if ( + originating_pkg_name in MINIMUM_VERSION_SPECIFIC_OVERRIDES + and pkg_name in MINIMUM_VERSION_SPECIFIC_OVERRIDES[originating_pkg_name] + ): + versions = [ + v + for v in versions + if Version(v) >= Version(MINIMUM_VERSION_SPECIFIC_OVERRIDES[originating_pkg_name][pkg_name]) + ] + + if pkg_name in MAXIMUM_VERSION_GENERIC_OVERRIDES: + versions = [ + v for v in versions if Version(v) <= Version(MAXIMUM_VERSION_GENERIC_OVERRIDES[pkg_name]) + ] + + for platform_bound, restrictions in PLATFORM_SPECIFIC_MAXIMUM_OVERRIDES.items(): + if compare_python_version(platform_bound) and pkg_name in restrictions: + versions = [v for v in versions if Version(v) <= Version(restrictions[pkg_name])] + + if ( + originating_pkg_name in MAXIMUM_VERSION_SPECIFIC_OVERRIDES + and pkg_name in MAXIMUM_VERSION_SPECIFIC_OVERRIDES[originating_pkg_name] + ): + versions = [ + v + for v in versions + if Version(v) <= Version(MAXIMUM_VERSION_SPECIFIC_OVERRIDES[originating_pkg_name][pkg_name]) + ] + + return versions + + +def process_requirement(req: str, dependency_type: str, orig_pkg_name: str) -> str: + """Determine the matching version for a requirement based on dependency type.""" + + requirement = parse_require(req) + pkg_name = requirement.name + spec = requirement.specifier if len(requirement.specifier) else None + + if not (requirement.marker is None or requirement.marker.evaluate()): + logger.info( + "Skipping requirement %r. Environment marker %r does not apply to current environment.", + req, + str(requirement.marker), + ) + return "" + + client = PyPIClient() + versions = [str(v) for v in client.get_ordered_versions(pkg_name, True)] + logger.info("Versions available on PyPI for %s: %s", pkg_name, versions) + + versions = process_bounded_versions(orig_pkg_name, pkg_name, versions) + + if dependency_type == "Latest": + versions.reverse() + + for version in versions: + if spec is None or version in spec: + logger.info( + "Found %s version %s that matches specifier %s", + dependency_type, + version, + spec, + ) + return pkg_name + "==" + version + + logger.error("No version is found on PyPI for package %s that matches specifier %s", pkg_name, spec) + return "" + + +def check_req_against_exclusion(req: str, req_to_exclude: str) -> bool: + """Return ``True`` if the dev requirement matches the package slated for exclusion.""" + + req_id = "" + for char in req: + if re.match(r"[A-Za-z0-9_-]", char): + req_id += char + else: + break + + return req_id == req_to_exclude + + +def filter_dev_requirements( + package_directory: str, + released_packages: List[str], + temp_dir: str, + additional_filter_fn: Optional[Callable[[str, List[str], List[Requirement]], List[str]]] = None, +) -> str: + """Filter dev requirements to avoid reinstalling packages we just resolved.""" + + dev_req_path = os.path.join(package_directory, DEV_REQ_FILE) + with open(dev_req_path, "r", encoding="utf-8") as dev_req_file: + requirements = dev_req_file.readlines() + + released_packages_parsed = [parse_require(p) for p in released_packages] + released_package_names = [p.name for p in released_packages_parsed] + + prebuilt_dev_reqs = [os.path.basename(req.replace("\n", "")) for req in requirements if os.path.sep in req] + req_to_exclude = [ 
+ req for req in prebuilt_dev_reqs if req.split("-")[0].replace("_", "-") in released_package_names + ] + req_to_exclude.extend(released_package_names) + + filtered_req = [ + req + for req in requirements + if os.path.basename(req.replace("\n", "")) not in req_to_exclude + and not any(check_req_against_exclusion(req, item) for item in req_to_exclude) + ] + + if additional_filter_fn: + filtered_req = additional_filter_fn(package_directory, filtered_req, released_packages_parsed) + + logger.info("Filtered dev requirements: %s", filtered_req) + + new_dev_req_path = "" + if filtered_req: + new_dev_req_path = os.path.join(temp_dir, NEW_DEV_REQ_FILE) + with open(new_dev_req_path, "w", encoding="utf-8") as dev_req_file: + dev_req_file.writelines(line if line.endswith("\n") else line + "\n" for line in filtered_req) + + return new_dev_req_path + + +def install_packages(packages: List[str], req_file: str, python_executable: str) -> None: + """Install resolved packages (and optionally a requirements file) into the target environment.""" + + python_exe = python_executable or sys.executable + commands = get_pip_command(python_exe) + commands.append("install") + + if commands[0] == "uv": + commands.extend(["--python", python_exe]) + + if packages: + commands.extend(packages) + + if req_file: + commands.extend(["-r", req_file]) + + logger.info("Installing packages. Command: %s", commands) + subprocess.check_call(commands) diff --git a/eng/tox/install_depend_packages.py b/eng/tox/install_depend_packages.py index 13303000dc43..469f27a4a1b0 100644 --- a/eng/tox/install_depend_packages.py +++ b/eng/tox/install_depend_packages.py @@ -6,383 +6,16 @@ # -------------------------------------------------------------------------------------------- import argparse +import logging import os import sys -import logging -import re - -from subprocess import check_call -from typing import TYPE_CHECKING, Callable, Optional -from pypi_tools.pypi import PyPIClient -from packaging.specifiers import SpecifierSet -from packaging.version import Version -from packaging.requirements import Requirement - -from ci_tools.parsing import ParsedSetup, parse_require -from ci_tools.functions import compare_python_version, handle_incompatible_minimum_dev_reqs, get_pip_command - -from typing import List -DEV_REQ_FILE = "dev_requirements.txt" -NEW_DEV_REQ_FILE = "new_dev_requirements.txt" -PKGS_TXT_FILE = "packages.txt" +from ci_tools.scenario.dependency_resolution import install_dependent_packages logging.getLogger().setLevel(logging.INFO) -# GENERIC_OVERRIDES dictionaries pair a specific dependency with a MINIMUM or MAXIMUM inclusive bound. -# During LATEST and MINIMUM dependency checks, we sometimes need to ignore versions for various compatibility -# reasons. -MINIMUM_VERSION_GENERIC_OVERRIDES = { - "azure-common": "1.1.10", - "msrest": "0.6.10", - "typing-extensions": "4.6.0", - "opentelemetry-api": "1.3.0", - "opentelemetry-sdk": "1.3.0", - "azure-core": "1.11.0", - "requests": "2.19.0", - "six": "1.12.0", - "cryptography": "41.0.0", - "msal": "1.23.0", - "azure-storage-file-datalake": "12.2.0", -} - -MAXIMUM_VERSION_GENERIC_OVERRIDES = {} - -# SPECIFIC OVERRIDES provide additional filtering of upper and lower bound by -# binding an override to the specific package being processed. As an example, when -# processing the latest or minimum deps for "azure-eventhub", the minimum version of "azure-core" -# will be overridden to 1.25.0. 
-MINIMUM_VERSION_SPECIFIC_OVERRIDES = { - "azure-eventhub": {"azure-core": "1.25.0"}, - "azure-eventhub-checkpointstoreblob-aio": {"azure-core": "1.25.0", "azure-eventhub": "5.11.0"}, - "azure-eventhub-checkpointstoreblob": {"azure-core": "1.25.0", "azure-eventhub": "5.11.0"}, - "azure-eventhub-checkpointstoretable": {"azure-core": "1.25.0", "azure-eventhub": "5.11.0"}, - "azure-identity": {"msal": "1.23.0"}, - "azure-core-tracing-opentelemetry": {"azure-core": "1.28.0"}, - "azure-storage-file-datalake": {"azure-storage-blob": "12.22.0"}, - "azure-cosmos": {"azure-core": "1.30.0"}, - "azure-appconfiguration-provider": {"azure-appconfiguration": "1.7.2"}, - "azure-ai-evaluation": {"aiohttp": "3.8.6"} -} - -MAXIMUM_VERSION_SPECIFIC_OVERRIDES = {} - -# PLATFORM SPECIFIC OVERRIDES provide additional generic (EG not tied to the package whos dependencies are being processed) -# filtering on a _per platform_ basis. Primarily used to limit certain packages due to platform compat -PLATFORM_SPECIFIC_MINIMUM_OVERRIDES = { - ">=3.14.0": { - "typing-extensions": "4.15.0", - }, - ">=3.12.0": { - "azure-core": "1.23.1", - "aiohttp": "3.9.0", - "six": "1.16.0", - "requests": "2.30.0" - }, - ">=3.13.0": { - "typing-extensions": "4.13.0", - "aiohttp": "3.10.6" - } -} - -PLATFORM_SPECIFIC_MAXIMUM_OVERRIDES = {} - -# This is used to actively _add_ requirements to the install set. These are used to actively inject -# a new requirement specifier to the set of packages being installed. -SPECIAL_CASE_OVERRIDES = { - # this package has an override - "azure-core": { - # if the version being installed matches this specifier, add the listed packages to the install list - "<1.24.0": ["msrest<0.7.0"] - } -} - - -def install_dependent_packages(setup_py_file_path, dependency_type, temp_dir): - # This method identifies latest/ minimal version of dependent packages and installs them from pyPI - # dependency type must either be latest or minimum - # Latest dependency will find latest released package that satisfies requires of given package name - # Minimum type will find minimum version on PyPI that satisfies requires of given package name - released_packages = find_released_packages(setup_py_file_path, dependency_type) - override_added_packages = [] - - # new section added to account for difficulties with msrest - for pkg_spec in released_packages: - override_added_packages.extend(check_pkg_against_overrides(pkg_spec)) - - logging.info("%s released packages: %s", dependency_type, released_packages) - - additional_filter_fn = None - if dependency_type == "Minimum": - additional_filter_fn = handle_incompatible_minimum_dev_reqs - - # before september 2024, filter_dev_requirements only would remove any packages present in released_packages from the dev_requirements, - # then create a new file "new_dev_requirements.txt" without the problematic packages. - # after september 2024, filter_dev_requirements will also check for **compatibility** with the packages being installed when filtering the dev_requirements. 
- dev_req_file_path = filter_dev_requirements(setup_py_file_path, released_packages, temp_dir, additional_filter_fn) - - if override_added_packages: - logging.info(f"Expanding the requirement set by the packages {override_added_packages}.") - - install_set = released_packages + list(set(override_added_packages)) - - # install released dependent packages - if released_packages or dev_req_file_path: - install_packages(install_set, dev_req_file_path) - - if released_packages: - # create a file with list of packages and versions found based on minimum or latest check on PyPI - # This file can be used to verify if we have correct version installed - pkgs_file_path = os.path.join(temp_dir, PKGS_TXT_FILE) - with open(pkgs_file_path, "w") as pkgs_file: - for package in released_packages: - pkgs_file.write(package + "\n") - logging.info("Created file %s to track azure packages found on PyPI", pkgs_file_path) - - -def check_pkg_against_overrides(pkg_specifier: str) -> List[str]: - """ - Checks a set of package specifiers of form "[A==1.0.0, B=2.0.0]". Used to inject additional package installations - as indicated by the SPECIAL_CASE_OVERRIDES dictionary. - - :param str pkg_specifier: A specifically targeted package that is about to be passed to install_packages. - """ - additional_installs = [] - target_package, target_version = pkg_specifier.split("==") - - target_version = Version(target_version) - if target_package in SPECIAL_CASE_OVERRIDES: - special_case_specifiers = SPECIAL_CASE_OVERRIDES[target_package] - - for specifier_set in special_case_specifiers.keys(): - spec = SpecifierSet(specifier_set) - if target_version in spec: - additional_installs.extend(special_case_specifiers[specifier_set]) - - return additional_installs - - -def find_released_packages(setup_py_path, dependency_type): - # this method returns list of required available package on PyPI in format == - pkg_info = ParsedSetup.from_path(setup_py_path) - - # parse setup.py and find install requires - requires = [r for r in pkg_info.requires if "-nspkg" not in r] - - # Get available version on PyPI for each required package - avlble_packages = [x for x in map(lambda x: process_requirement(x, dependency_type, pkg_info.name), requires) if x] - - return avlble_packages - - -def process_bounded_versions(originating_pkg_name: str, pkg_name: str, versions: List[str]) -> List[str]: - """ - Processes a target package based on an originating package (target is a dep of originating) and the versions available from pypi for the target package. - - Returns the set of versions AFTER general, platform, and package-specific overrides have been applied. - - :param str originating_pkg_name: The name of the package whos requirements are being processed. - :param str pkg_name: A specific requirement of the originating package being processed. - :param List[str] versions: All the versions available on pypi for pkg_name. 
- """ - - # lower bound general - if pkg_name in MINIMUM_VERSION_GENERIC_OVERRIDES: - versions = [ - v for v in versions if Version(v) >= Version(MINIMUM_VERSION_GENERIC_OVERRIDES[pkg_name]) - ] - - # lower bound platform-specific - for platform_bound in PLATFORM_SPECIFIC_MINIMUM_OVERRIDES.keys(): - if compare_python_version(platform_bound): - restrictions = PLATFORM_SPECIFIC_MINIMUM_OVERRIDES[platform_bound] - - if pkg_name in restrictions: - versions = [v for v in versions if Version(v) >= Version(restrictions[pkg_name])] - - # lower bound package-specific - if ( - originating_pkg_name in MINIMUM_VERSION_SPECIFIC_OVERRIDES - and pkg_name in MINIMUM_VERSION_SPECIFIC_OVERRIDES[originating_pkg_name] - ): - versions = [ - v - for v in versions - if Version(v) >= Version(MINIMUM_VERSION_SPECIFIC_OVERRIDES[originating_pkg_name][pkg_name]) - ] - # upper bound general - if pkg_name in MAXIMUM_VERSION_GENERIC_OVERRIDES: - versions = [ - v for v in versions if Version(v) <= Version(MAXIMUM_VERSION_GENERIC_OVERRIDES[pkg_name]) - ] - - # upper bound platform - for platform_bound in PLATFORM_SPECIFIC_MAXIMUM_OVERRIDES.keys(): - if compare_python_version(platform_bound): - restrictions = PLATFORM_SPECIFIC_MAXIMUM_OVERRIDES[platform_bound] - - if pkg_name in restrictions: - versions = [v for v in versions if Version(v) <= Version(restrictions[pkg_name])] - - # upper bound package-specific - if ( - originating_pkg_name in MAXIMUM_VERSION_SPECIFIC_OVERRIDES - and pkg_name in MAXIMUM_VERSION_SPECIFIC_OVERRIDES[originating_pkg_name] - ): - versions = [ - v - for v in versions - if Version(v) <= Version(MAXIMUM_VERSION_SPECIFIC_OVERRIDES[originating_pkg_name][pkg_name]) - ] - - return versions - - -def process_requirement(req, dependency_type, orig_pkg_name): - # this method finds either latest or minimum version of a package that is available on PyPI - - # find package name and requirement specifier from requires - requirement = parse_require(req) - pkg_name = requirement.name - spec = requirement.specifier if len(requirement.specifier) else None - - # Filter out requirements with environment markers that don't match the current environment - # e.g. `; python_version > 3.10` when running on Python3.9 - if not (requirement.marker is None or requirement.marker.evaluate()): - logging.info( - f"Skipping requirement {req!r}. Environment marker {str(requirement.marker)!r} " - + "does not apply to current environment." - ) - return "" - - # get available versions on PyPI - client = PyPIClient() - versions = [str(v) for v in client.get_ordered_versions(pkg_name, True)] - logging.info("Versions available on PyPI for %s: %s", pkg_name, versions) - - # think of the various versions that come back from pypi as the top of a funnel - # We apply generic overrides -> platform specific overrides -> package specific overrides - versions = process_bounded_versions(orig_pkg_name, pkg_name, versions) - - # Search from lowest to latest in case of finding minimum dependency - # Search from latest to lowest in case of finding latest required version - # reverse the list to get latest version first - if dependency_type == "Latest": - versions.reverse() - - # return first version that matches specifier in == format - for version in versions: - # if there IS NO specifier, then we should take the first entry. we have already sorted for latest/minimum. 
- if spec is None: - return pkg_name + "==" + version - - if version in spec: - logging.info( - "Found %s version %s that matches specifier %s", - dependency_type, - version, - spec, - ) - return pkg_name + "==" + version - - logging.error( - "No version is found on PyPI for package %s that matches specifier %s", - pkg_name, - spec, - ) - return "" - - -def check_req_against_exclusion(req, req_to_exclude): - """ - This function evaluates a requirement from a dev_requirements file against a file name. Returns True - if the requirement is for the same package listed in "req_to_exclude". False otherwise. - - :param req: An incoming "req" looks like a requirement that appears in a dev_requirements file. EG: [ "../../../eng/tools/azure-sdk-tools", - "https://docsupport.blob.core.windows.net/repackaged/cffi-1.14.6-cp310-cp310-win_amd64.whl; sys_platform=='win32' and python_version >= '3.10'", - "msrestazure>=0.4.11", "pytest" ] - - :param req_to_exclude: A valid and complete python package name. No specifiers. - """ - req_id = "" - for c in req: - if re.match(r"[A-Za-z0-9_-]", c): - req_id += c - else: - break - - return req_id == req_to_exclude - - -def filter_dev_requirements( - package_directory, - released_packages, - temp_dir, - additional_filter_fn: Optional[Callable[[str, List[str], List[Requirement]], List[str]]] = None, -): - """ - This function takes an existing package path, a list of specific package specifiers that we have resolved, a temporary directory to write - the modified dev_requirements to, and an optional additional_filter_fn that can be used to further filter the dev_requirements file if necessary. - - The function will filter out any requirements present in the dev_requirements file that are present in the released_packages list (aka are required - by the package). 
- """ - # This method returns list of requirements from dev_requirements by filtering out packages in given list - dev_req_path = os.path.join(package_directory, DEV_REQ_FILE) - requirements = [] - with open(dev_req_path, "r") as dev_req_file: - requirements = dev_req_file.readlines() - - # filter out any package available on PyPI (released_packages) - # include packages without relative reference and packages not available on PyPI - released_packages = [parse_require(p) for p in released_packages] - released_package_names = [p.name for p in released_packages] - # find prebuilt whl paths in dev requiremente - prebuilt_dev_reqs = [os.path.basename(req.replace("\n", "")) for req in requirements if os.path.sep in req] - # filter any req if wheel is for a released package - req_to_exclude = [req for req in prebuilt_dev_reqs if req.split("-")[0].replace("_", "-") in released_package_names] - req_to_exclude.extend(released_package_names) - - filtered_req = [ - req - for req in requirements - if os.path.basename(req.replace("\n", "")) not in req_to_exclude - and not any([check_req_against_exclusion(req, i) for i in req_to_exclude]) - ] - - if additional_filter_fn: - # this filter function handles the case where a dev requirement is incompatible with the current set of targeted packages - filtered_req = additional_filter_fn(package_directory, filtered_req, released_packages) - - logging.info("Filtered dev requirements: %s", filtered_req) - - new_dev_req_path = "" - if filtered_req: - # create new dev requirements file with different name for filtered requirements - new_dev_req_path = os.path.join(temp_dir, NEW_DEV_REQ_FILE) - with open(new_dev_req_path, "w") as dev_req_file: - dev_req_file.writelines(line if line.endswith("\n") else line + "\n" for line in filtered_req) - - return new_dev_req_path - - -def install_packages(packages, req_file): - # install list of given packages from PyPI - commands = get_pip_command() - commands.append("install") - - if packages: - commands.extend(packages) - - if req_file: - commands.extend(["-r", req_file]) - - logging.info("Installing packages. Command: %s", commands) - check_call(commands) - - -if __name__ == "__main__": +def main() -> None: parser = argparse.ArgumentParser(description="Install either latest or minimum version of dependent packages.") parser.add_argument( @@ -412,11 +45,14 @@ def install_packages(packages, req_file): args = parser.parse_args() - setup_path = os.path.join(os.path.abspath(args.target_package)) + setup_path = os.path.abspath(args.target_package) if not (os.path.exists(setup_path) and os.path.exists(args.work_dir)): logging.error("Invalid arguments. Please make sure target directory and working directory are valid path") sys.exit(1) + install_dependent_packages(setup_path, args.dependency_type, args.work_dir, python_executable=sys.executable) - install_dependent_packages(setup_path, args.dependency_type, args.work_dir) + +if __name__ == "__main__": + main() diff --git a/scripts/devops_tasks/set_tox_environment.py b/scripts/devops_tasks/set_tox_environment.py index 3c4c66b95342..dd879c486f50 100644 --- a/scripts/devops_tasks/set_tox_environment.py +++ b/scripts/devops_tasks/set_tox_environment.py @@ -15,9 +15,9 @@ FULL_BUILD_SET = [ "whl", "sdist", - # "depends", - # "latestdependency", - # "mindependency", + "import_all", + "latestdependency", + "mindependency", "whl_no_aio", ] # this branch is checking only whl for now. 
rest will follow as they migrate

From d3006bff3f8a9c5cd29188b58eda0e0ed84a5ea0 Mon Sep 17 00:00:00 2001
From: Scott Beddall
Date: Fri, 5 Dec 2025 02:15:39 +0000
Subject: [PATCH 18/76] ensure that import_all still works when the local CWD
 (the package directory) is added to the path

---
 eng/tools/azure-sdk-tools/azpysdk/import_all.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/eng/tools/azure-sdk-tools/azpysdk/import_all.py b/eng/tools/azure-sdk-tools/azpysdk/import_all.py
index a624512013c8..c80752dc9aaa 100644
--- a/eng/tools/azure-sdk-tools/azpysdk/import_all.py
+++ b/eng/tools/azure-sdk-tools/azpysdk/import_all.py
@@ -76,7 +76,7 @@ def run(self, args: argparse.Namespace) -> int:
                 import_script_all = "from {0} import *".format(parsed.namespace)
                 commands = [executable, "-c", import_script_all]
 
-                outcomes.append(check_call(commands))
+                outcomes.append(check_call(commands, cwd=staging_directory))
                 logger.info("Verified module dependency, no issues found")
             else:
                 logger.info("Package {} is excluded from dependency check".format(parsed.name))

From 8f48ce581065240c0684abeb2c8c434191292db4 Mon Sep 17 00:00:00 2001
From: Scott Beddall
Date: Fri, 5 Dec 2025 02:21:41 +0000
Subject: [PATCH 19/76] changes to import_all

---
 eng/tools/azure-sdk-tools/azpysdk/import_all.py | 6 +++++-
 1 file changed, 5 insertions(+), 1 deletion(-)

diff --git a/eng/tools/azure-sdk-tools/azpysdk/import_all.py b/eng/tools/azure-sdk-tools/azpysdk/import_all.py
index c80752dc9aaa..d16da183defb 100644
--- a/eng/tools/azure-sdk-tools/azpysdk/import_all.py
+++ b/eng/tools/azure-sdk-tools/azpysdk/import_all.py
@@ -77,7 +77,11 @@ def run(self, args: argparse.Namespace) -> int:
                 commands = [executable, "-c", import_script_all]
 
                 outcomes.append(check_call(commands, cwd=staging_directory))
-                logger.info("Verified module dependency, no issues found")
+
+                if outcomes[-1] == 0:
+                    logger.info("Verified module dependency, no issues found")
+                else:
+                    logger.error(f"Dependency issue found when invoking \"{import_script_all}\" against package {parsed.name}")
             else:
                 logger.info("Package {} is excluded from dependency check".format(parsed.name))

From a9b5bd9945415a20dfc8fa3db98405c7a6c80768 Mon Sep 17 00:00:00 2001
From: Scott Beddall
Date: Wed, 10 Dec 2025 23:03:46 +0000
Subject: [PATCH 20/76] misusing ignore-glob means we accidentally picked up a
 file from within the site-packages directory

---
 eng/tools/azure-sdk-tools/azpysdk/install_and_test.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/eng/tools/azure-sdk-tools/azpysdk/install_and_test.py b/eng/tools/azure-sdk-tools/azpysdk/install_and_test.py
index 6d8bd25b96f1..25a219511386 100644
--- a/eng/tools/azure-sdk-tools/azpysdk/install_and_test.py
+++ b/eng/tools/azure-sdk-tools/azpysdk/install_and_test.py
@@ -178,7 +178,7 @@ def _build_pytest_args(self, package_dir: str, args: argparse.Namespace) -> List
             "--durations=10",
             "--ignore=azure",
             "--ignore=.tox",
-            "--ignore-glob=.venv*",
+            "--ignore-glob=**/.venv*",
             "--ignore=build",
             "--ignore=.eggs",
             "--ignore=samples",

From 8f48ac68c6d2dd8a5f26cab647b63d3f29296405 Mon Sep 17 00:00:00 2001
From: Scott Beddall
Date: Thu, 11 Dec 2025 01:27:09 +0000
Subject: [PATCH 21/76] small upgrades

---
 eng/scripts/dispatch_checks.py                        | 2 +-
 eng/tools/azure-sdk-tools/azpysdk/install_and_test.py | 1 +
 2 files changed, 2 insertions(+), 1 deletion(-)

diff --git a/eng/scripts/dispatch_checks.py b/eng/scripts/dispatch_checks.py
index 756f4655a2b9..c89086b5d536 100644
--- a/eng/scripts/dispatch_checks.py
+++ 
b/eng/scripts/dispatch_checks.py @@ -78,7 +78,7 @@ async def run_check( header = f"===== OUTPUT: {check} :: {package} (exit {exit_code}) =====" trailer = "=" * len(header) if in_ci(): - print(f"##[group]{package} :: {check}") + print(f"##[group]{package} :: {check} :: {exit_code}") if stdout: print(header) diff --git a/eng/tools/azure-sdk-tools/azpysdk/install_and_test.py b/eng/tools/azure-sdk-tools/azpysdk/install_and_test.py index 25a219511386..b86c515a16ea 100644 --- a/eng/tools/azure-sdk-tools/azpysdk/install_and_test.py +++ b/eng/tools/azure-sdk-tools/azpysdk/install_and_test.py @@ -179,6 +179,7 @@ def _build_pytest_args(self, package_dir: str, args: argparse.Namespace) -> List "--ignore=azure", "--ignore=.tox", "--ignore-glob=**/.venv*", + "--ignore-glob=**/.venv*/**", "--ignore=build", "--ignore=.eggs", "--ignore=samples", From 526a5f5d6e6e28d9b1f0f38babde8fe419e0af76 Mon Sep 17 00:00:00 2001 From: Scott Beddall Date: Thu, 11 Dec 2025 01:53:48 +0000 Subject: [PATCH 22/76] force re-init of logger from subprocess --- eng/tools/azure-sdk-tools/ci_tools/logging/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/eng/tools/azure-sdk-tools/ci_tools/logging/__init__.py b/eng/tools/azure-sdk-tools/ci_tools/logging/__init__.py index a7491ec3a461..ff6e22e6fe22 100644 --- a/eng/tools/azure-sdk-tools/ci_tools/logging/__init__.py +++ b/eng/tools/azure-sdk-tools/ci_tools/logging/__init__.py @@ -32,7 +32,7 @@ def configure_logging(args: argparse.Namespace, fmt: str = "%(asctime)s [%(level logger.setLevel(numeric_level) # Propagate logger config globally if needed - logging.basicConfig(level=numeric_level, format=fmt) + logging.basicConfig(level=numeric_level, format=fmt, force=True) def now() -> str: From af16ff40024f9635518b0cd9d36b10e63f2e24fd Mon Sep 17 00:00:00 2001 From: Scott Beddall Date: Fri, 12 Dec 2025 19:06:12 +0000 Subject: [PATCH 23/76] we should use the wheel dir to fix the parallelism issues --- .github/workflows/azure-sdk-tools.yml | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/.github/workflows/azure-sdk-tools.yml b/.github/workflows/azure-sdk-tools.yml index 4858ff6e9aad..e0eb970e4ade 100644 --- a/.github/workflows/azure-sdk-tools.yml +++ b/.github/workflows/azure-sdk-tools.yml @@ -92,7 +92,8 @@ jobs: - name: Run all discovered checks against azure-template using uv as package manager run: | - python eng/scripts/dispatch_checks.py --checks "$AZPYSDK_CHECKS" azure-template + sdk_build azure-template -d $(pwd)/wheels + python eng/scripts/dispatch_checks.py --checks "$AZPYSDK_CHECKS" --wheel_dir $(pwd)/wheels azure-template shell: bash env: TOX_PIP_IMPL: "uv" @@ -104,7 +105,7 @@ jobs: - name: Run all discovered checks against azure-template using pip as package manager run: | - python eng/scripts/dispatch_checks.py --checks "$AZPYSDK_CHECKS" azure-template + python eng/scripts/dispatch_checks.py --checks "$AZPYSDK_CHECKS" --wheel_dir $(pwd)/wheels azure-template shell: bash dev-setup-and-import: From 1ef77ceb03f0628680a8c9d48f902801f02ea4dc Mon Sep 17 00:00:00 2001 From: Scott Beddall Date: Fri, 12 Dec 2025 19:18:35 +0000 Subject: [PATCH 24/76] fix the failing test --- eng/tools/azure-sdk-tools/tests/test_logging_config.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/eng/tools/azure-sdk-tools/tests/test_logging_config.py b/eng/tools/azure-sdk-tools/tests/test_logging_config.py index 9e3db3175fec..c58af719598a 100644 --- a/eng/tools/azure-sdk-tools/tests/test_logging_config.py +++ 
b/eng/tools/azure-sdk-tools/tests/test_logging_config.py @@ -22,5 +22,5 @@ def test_configure_logging_various_levels(mock_basic_config, cli_args, level_env configure_logging(cli_args) assert logger.level == expected_level mock_basic_config.assert_called_with( - level=expected_level, format="%(asctime)s [%(levelname)s] %(name)s: %(message)s" + level=expected_level, format="%(asctime)s [%(levelname)s] %(name)s: %(message)s", force=True ) From fd740fd0267d89babf2c1dd31f9bbfbb8b219c11 Mon Sep 17 00:00:00 2001 From: Scott Beddall Date: Fri, 12 Dec 2025 19:19:01 +0000 Subject: [PATCH 25/76] save progress --- .github/workflows/azure-sdk-tools.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/azure-sdk-tools.yml b/.github/workflows/azure-sdk-tools.yml index e0eb970e4ade..5d54613b5bce 100644 --- a/.github/workflows/azure-sdk-tools.yml +++ b/.github/workflows/azure-sdk-tools.yml @@ -70,7 +70,7 @@ jobs: - name: Install azure-sdk-tools on in global uv, discover azpysdk checks run: | - uv pip install --system eng/tools/azure-sdk-tools[build,ghtools,conda] + uv pip install --system eng/tools/azure-sdk-tools[build,ghtools,conda,systemperf] # Discover available azpysdk commands from the {command1,command2,...} line in help output CHECKS=$(azpysdk -h 2>&1 | \ From 099d79f94ef3fb89bbada7b960ffc95a6de0f9d1 Mon Sep 17 00:00:00 2001 From: Scott Beddall Date: Fri, 12 Dec 2025 19:57:20 +0000 Subject: [PATCH 26/76] apply black format. add buildid to the build run --- .github/workflows/azure-sdk-tools.yml | 2 +- eng/tools/azure-sdk-tools/azpysdk/Check.py | 1 + .../azpysdk/dependency_check.py | 4 +- .../azure-sdk-tools/azpysdk/import_all.py | 4 +- .../azpysdk/install_and_test.py | 5 +- .../scenario/dependency_resolution.py | 422 +++++++++--------- 6 files changed, 215 insertions(+), 223 deletions(-) diff --git a/.github/workflows/azure-sdk-tools.yml b/.github/workflows/azure-sdk-tools.yml index 5d54613b5bce..d3e3d3ccb962 100644 --- a/.github/workflows/azure-sdk-tools.yml +++ b/.github/workflows/azure-sdk-tools.yml @@ -92,7 +92,7 @@ jobs: - name: Run all discovered checks against azure-template using uv as package manager run: | - sdk_build azure-template -d $(pwd)/wheels + sdk_build azure-template -d $(pwd)/wheels --buildid 20250101.1 python eng/scripts/dispatch_checks.py --checks "$AZPYSDK_CHECKS" --wheel_dir $(pwd)/wheels azure-template shell: bash env: diff --git a/eng/tools/azure-sdk-tools/azpysdk/Check.py b/eng/tools/azure-sdk-tools/azpysdk/Check.py index 1726b25b93ee..f01c2d8d3beb 100644 --- a/eng/tools/azure-sdk-tools/azpysdk/Check.py +++ b/eng/tools/azure-sdk-tools/azpysdk/Check.py @@ -39,6 +39,7 @@ TEST_TOOLS_REQUIREMENTS = os.path.join(REPO_ROOT, "eng/test_tools.txt") DEPENDENCY_TOOLS_REQUIREMENTS = os.path.join(REPO_ROOT, "eng/dependency_tools.txt") + class Check(abc.ABC): """ Base class for checks. diff --git a/eng/tools/azure-sdk-tools/azpysdk/dependency_check.py b/eng/tools/azure-sdk-tools/azpysdk/dependency_check.py index 4157347194f3..c2757295e501 100644 --- a/eng/tools/azure-sdk-tools/azpysdk/dependency_check.py +++ b/eng/tools/azure-sdk-tools/azpysdk/dependency_check.py @@ -116,9 +116,7 @@ def run(self, args: argparse.Namespace) -> int: package_name, ) continue - logger.error( - f"pytest failed for {package_name} with exit code {pytest_result.returncode}." 
- ) + logger.error(f"pytest failed for {package_name} with exit code {pytest_result.returncode}.") results.append(pytest_result.returncode) return max(results) if results else 0 diff --git a/eng/tools/azure-sdk-tools/azpysdk/import_all.py b/eng/tools/azure-sdk-tools/azpysdk/import_all.py index d16da183defb..f1f487fcd1ec 100644 --- a/eng/tools/azure-sdk-tools/azpysdk/import_all.py +++ b/eng/tools/azure-sdk-tools/azpysdk/import_all.py @@ -81,7 +81,9 @@ def run(self, args: argparse.Namespace) -> int: if outcomes[-1] == 0: logger.info("Verified module dependency, no issues found") else: - logger.error(f"Dependency issue found when invoking \"{import_script_all}\" against package {parsed.name}") + logger.error( + f'Dependency issue found when invoking "{import_script_all}" against package {parsed.name}' + ) else: logger.info("Package {} is excluded from dependency check".format(parsed.name)) diff --git a/eng/tools/azure-sdk-tools/azpysdk/install_and_test.py b/eng/tools/azure-sdk-tools/azpysdk/install_and_test.py index b86c515a16ea..123b5cfa1d6a 100644 --- a/eng/tools/azure-sdk-tools/azpysdk/install_and_test.py +++ b/eng/tools/azure-sdk-tools/azpysdk/install_and_test.py @@ -13,6 +13,7 @@ REPO_ROOT = discover_repo_root() + class InstallAndTest(Check): """Shared implementation for build-and-test style checks.""" @@ -108,9 +109,7 @@ def run(self, args: argparse.Namespace) -> int: continue else: results.append(pytest_result.returncode) - logger.error( - f"pytest failed for {package_name} with exit code {pytest_result.returncode}." - ) + logger.error(f"pytest failed for {package_name} with exit code {pytest_result.returncode}.") continue if not self.coverage_enabled: diff --git a/eng/tools/azure-sdk-tools/ci_tools/scenario/dependency_resolution.py b/eng/tools/azure-sdk-tools/ci_tools/scenario/dependency_resolution.py index b469f5778a3c..90e7bab9bc52 100644 --- a/eng/tools/azure-sdk-tools/ci_tools/scenario/dependency_resolution.py +++ b/eng/tools/azure-sdk-tools/ci_tools/scenario/dependency_resolution.py @@ -18,9 +18,9 @@ from pypi_tools.pypi import PyPIClient from ci_tools.functions import ( - compare_python_version, - get_pip_command, - handle_incompatible_minimum_dev_reqs, + compare_python_version, + get_pip_command, + handle_incompatible_minimum_dev_reqs, ) from ci_tools.parsing import ParsedSetup, parse_require @@ -34,17 +34,17 @@ # During LATEST and MINIMUM dependency checks, we sometimes need to ignore versions for various compatibility # reasons. MINIMUM_VERSION_GENERIC_OVERRIDES = { - "azure-common": "1.1.10", - "msrest": "0.6.10", - "typing-extensions": "4.6.0", - "opentelemetry-api": "1.3.0", - "opentelemetry-sdk": "1.3.0", - "azure-core": "1.11.0", - "requests": "2.19.0", - "six": "1.12.0", - "cryptography": "41.0.0", - "msal": "1.23.0", - "azure-storage-file-datalake": "12.2.0", + "azure-common": "1.1.10", + "msrest": "0.6.10", + "typing-extensions": "4.6.0", + "opentelemetry-api": "1.3.0", + "opentelemetry-sdk": "1.3.0", + "azure-core": "1.11.0", + "requests": "2.19.0", + "six": "1.12.0", + "cryptography": "41.0.0", + "msal": "1.23.0", + "azure-storage-file-datalake": "12.2.0", } MAXIMUM_VERSION_GENERIC_OVERRIDES = {} @@ -54,16 +54,16 @@ # processing the latest or minimum deps for "azure-eventhub", the minimum version of "azure-core" # will be overridden to 1.25.0. 
MINIMUM_VERSION_SPECIFIC_OVERRIDES = { - "azure-eventhub": {"azure-core": "1.25.0"}, - "azure-eventhub-checkpointstoreblob-aio": {"azure-core": "1.25.0", "azure-eventhub": "5.11.0"}, - "azure-eventhub-checkpointstoreblob": {"azure-core": "1.25.0", "azure-eventhub": "5.11.0"}, - "azure-eventhub-checkpointstoretable": {"azure-core": "1.25.0", "azure-eventhub": "5.11.0"}, - "azure-identity": {"msal": "1.23.0"}, - "azure-core-tracing-opentelemetry": {"azure-core": "1.28.0"}, - "azure-storage-file-datalake": {"azure-storage-blob": "12.22.0"}, - "azure-cosmos": {"azure-core": "1.30.0"}, - "azure-appconfiguration-provider": {"azure-appconfiguration": "1.7.2"}, - "azure-ai-evaluation": {"aiohttp": "3.8.6"}, + "azure-eventhub": {"azure-core": "1.25.0"}, + "azure-eventhub-checkpointstoreblob-aio": {"azure-core": "1.25.0", "azure-eventhub": "5.11.0"}, + "azure-eventhub-checkpointstoreblob": {"azure-core": "1.25.0", "azure-eventhub": "5.11.0"}, + "azure-eventhub-checkpointstoretable": {"azure-core": "1.25.0", "azure-eventhub": "5.11.0"}, + "azure-identity": {"msal": "1.23.0"}, + "azure-core-tracing-opentelemetry": {"azure-core": "1.28.0"}, + "azure-storage-file-datalake": {"azure-storage-blob": "12.22.0"}, + "azure-cosmos": {"azure-core": "1.30.0"}, + "azure-appconfiguration-provider": {"azure-appconfiguration": "1.7.2"}, + "azure-ai-evaluation": {"aiohttp": "3.8.6"}, } MAXIMUM_VERSION_SPECIFIC_OVERRIDES = {} @@ -71,19 +71,19 @@ # PLATFORM SPECIFIC OVERRIDES provide additional generic (EG not tied to the package whose dependencies are being processed) # filtering on a _per platform_ basis. Primarily used to limit certain packages due to platform compatibility. PLATFORM_SPECIFIC_MINIMUM_OVERRIDES = { - ">=3.14.0": { - "typing-extensions": "4.15.0", - }, - ">=3.12.0": { - "azure-core": "1.23.1", - "aiohttp": "3.9.0", - "six": "1.16.0", - "requests": "2.30.0", - }, - ">=3.13.0": { - "typing-extensions": "4.13.0", - "aiohttp": "3.10.6", - }, + ">=3.14.0": { + "typing-extensions": "4.15.0", + }, + ">=3.12.0": { + "azure-core": "1.23.1", + "aiohttp": "3.9.0", + "six": "1.16.0", + "requests": "2.30.0", + }, + ">=3.13.0": { + "typing-extensions": "4.13.0", + "aiohttp": "3.10.6", + }, } PLATFORM_SPECIFIC_MAXIMUM_OVERRIDES = {} @@ -91,248 +91,240 @@ # This is used to actively _add_ requirements to the install set. These are used to actively inject # a new requirement specifier to the set of packages being installed. SPECIAL_CASE_OVERRIDES = { - # this package has an override - "azure-core": { - # if the version being installed matches this specifier, add the listed packages to the install list - "<1.24.0": ["msrest<0.7.0"], - } + # this package has an override + "azure-core": { + # if the version being installed matches this specifier, add the listed packages to the install list + "<1.24.0": ["msrest<0.7.0"], + } } __all__ = [ - "install_dependent_packages", - "filter_dev_requirements", - "find_released_packages", + "install_dependent_packages", + "filter_dev_requirements", + "find_released_packages", ] def install_dependent_packages( - setup_py_file_path: str, - dependency_type: str, - temp_dir: str, - python_executable: Optional[str] = None, + setup_py_file_path: str, + dependency_type: str, + temp_dir: str, + python_executable: Optional[str] = None, ) -> None: - """Identify and install the dependency set for a package. + """Identify and install the dependency set for a package. - :param setup_py_file_path: Path to the target package directory. - :param dependency_type: Either ``"Latest"`` or ``"Minimum"``. 
- :param temp_dir: Directory where temporary artifacts (e.g. filtered requirements, packages.txt) are written. - :param python_executable: Optional interpreter whose environment should receive the installations. Defaults to - the current ``sys.executable``. - """ + :param setup_py_file_path: Path to the target package directory. + :param dependency_type: Either ``"Latest"`` or ``"Minimum"``. + :param temp_dir: Directory where temporary artifacts (e.g. filtered requirements, packages.txt) are written. + :param python_executable: Optional interpreter whose environment should receive the installations. Defaults to + the current ``sys.executable``. + """ - python_exe = python_executable or sys.executable + python_exe = python_executable or sys.executable - released_packages = find_released_packages(setup_py_file_path, dependency_type) - override_added_packages: List[str] = [] + released_packages = find_released_packages(setup_py_file_path, dependency_type) + override_added_packages: List[str] = [] - for pkg_spec in released_packages: - override_added_packages.extend(check_pkg_against_overrides(pkg_spec)) + for pkg_spec in released_packages: + override_added_packages.extend(check_pkg_against_overrides(pkg_spec)) - logger.info("%s released packages: %s", dependency_type, released_packages) + logger.info("%s released packages: %s", dependency_type, released_packages) - additional_filter_fn: Optional[Callable[[str, List[str], List[Requirement]], List[str]]] = None - if dependency_type == "Minimum": - additional_filter_fn = handle_incompatible_minimum_dev_reqs + additional_filter_fn: Optional[Callable[[str, List[str], List[Requirement]], List[str]]] = None + if dependency_type == "Minimum": + additional_filter_fn = handle_incompatible_minimum_dev_reqs - dev_req_file_path = filter_dev_requirements( - setup_py_file_path, released_packages, temp_dir, additional_filter_fn - ) + dev_req_file_path = filter_dev_requirements(setup_py_file_path, released_packages, temp_dir, additional_filter_fn) - if override_added_packages: - logger.info("Expanding the requirement set by the packages %s.", override_added_packages) + if override_added_packages: + logger.info("Expanding the requirement set by the packages %s.", override_added_packages) - install_set = released_packages + list(set(override_added_packages)) + install_set = released_packages + list(set(override_added_packages)) - if install_set or dev_req_file_path: - install_packages(install_set, dev_req_file_path, python_exe) + if install_set or dev_req_file_path: + install_packages(install_set, dev_req_file_path, python_exe) - if released_packages: - pkgs_file_path = os.path.join(temp_dir, PKGS_TXT_FILE) - with open(pkgs_file_path, "w", encoding="utf-8") as pkgs_file: - for package in released_packages: - pkgs_file.write(package + "\n") - logger.info("Created file %s to track azure packages found on PyPI", pkgs_file_path) + if released_packages: + pkgs_file_path = os.path.join(temp_dir, PKGS_TXT_FILE) + with open(pkgs_file_path, "w", encoding="utf-8") as pkgs_file: + for package in released_packages: + pkgs_file.write(package + "\n") + logger.info("Created file %s to track azure packages found on PyPI", pkgs_file_path) def check_pkg_against_overrides(pkg_specifier: str) -> List[str]: - """Apply ``SPECIAL_CASE_OVERRIDES`` for a resolved package specifier.""" + """Apply ``SPECIAL_CASE_OVERRIDES`` for a resolved package specifier.""" - additional_installs: List[str] = [] - target_package, target_version = pkg_specifier.split("==") + additional_installs: 
List[str] = [] + target_package, target_version = pkg_specifier.split("==") - target_version_obj = Version(target_version) - if target_package in SPECIAL_CASE_OVERRIDES: - for specifier_set, extras in SPECIAL_CASE_OVERRIDES[target_package].items(): - spec = SpecifierSet(specifier_set) - if target_version_obj in spec: - additional_installs.extend(extras) + target_version_obj = Version(target_version) + if target_package in SPECIAL_CASE_OVERRIDES: + for specifier_set, extras in SPECIAL_CASE_OVERRIDES[target_package].items(): + spec = SpecifierSet(specifier_set) + if target_version_obj in spec: + additional_installs.extend(extras) - return additional_installs + return additional_installs def find_released_packages(setup_py_path: str, dependency_type: str) -> List[str]: - """Resolve the appropriate released dependency versions for a package.""" + """Resolve the appropriate released dependency versions for a package.""" - pkg_info = ParsedSetup.from_path(setup_py_path) - requires = [r for r in pkg_info.requires if "-nspkg" not in r] - available_packages = [ - spec for spec in map(lambda req: process_requirement(req, dependency_type, pkg_info.name), requires) if spec - ] - return available_packages + pkg_info = ParsedSetup.from_path(setup_py_path) + requires = [r for r in pkg_info.requires if "-nspkg" not in r] + available_packages = [ + spec for spec in map(lambda req: process_requirement(req, dependency_type, pkg_info.name), requires) if spec + ] + return available_packages def process_bounded_versions(originating_pkg_name: str, pkg_name: str, versions: List[str]) -> List[str]: - """Apply generic, platform, and package-specific bounds to the available versions list.""" - - if pkg_name in MINIMUM_VERSION_GENERIC_OVERRIDES: - versions = [ - v for v in versions if Version(v) >= Version(MINIMUM_VERSION_GENERIC_OVERRIDES[pkg_name]) - ] - - for platform_bound, restrictions in PLATFORM_SPECIFIC_MINIMUM_OVERRIDES.items(): - if compare_python_version(platform_bound) and pkg_name in restrictions: - versions = [v for v in versions if Version(v) >= Version(restrictions[pkg_name])] - - if ( - originating_pkg_name in MINIMUM_VERSION_SPECIFIC_OVERRIDES - and pkg_name in MINIMUM_VERSION_SPECIFIC_OVERRIDES[originating_pkg_name] - ): - versions = [ - v - for v in versions - if Version(v) >= Version(MINIMUM_VERSION_SPECIFIC_OVERRIDES[originating_pkg_name][pkg_name]) - ] - - if pkg_name in MAXIMUM_VERSION_GENERIC_OVERRIDES: - versions = [ - v for v in versions if Version(v) <= Version(MAXIMUM_VERSION_GENERIC_OVERRIDES[pkg_name]) - ] - - for platform_bound, restrictions in PLATFORM_SPECIFIC_MAXIMUM_OVERRIDES.items(): - if compare_python_version(platform_bound) and pkg_name in restrictions: - versions = [v for v in versions if Version(v) <= Version(restrictions[pkg_name])] - - if ( - originating_pkg_name in MAXIMUM_VERSION_SPECIFIC_OVERRIDES - and pkg_name in MAXIMUM_VERSION_SPECIFIC_OVERRIDES[originating_pkg_name] - ): - versions = [ - v - for v in versions - if Version(v) <= Version(MAXIMUM_VERSION_SPECIFIC_OVERRIDES[originating_pkg_name][pkg_name]) - ] - - return versions + """Apply generic, platform, and package-specific bounds to the available versions list.""" + + if pkg_name in MINIMUM_VERSION_GENERIC_OVERRIDES: + versions = [v for v in versions if Version(v) >= Version(MINIMUM_VERSION_GENERIC_OVERRIDES[pkg_name])] + + for platform_bound, restrictions in PLATFORM_SPECIFIC_MINIMUM_OVERRIDES.items(): + if compare_python_version(platform_bound) and pkg_name in restrictions: + versions = [v for v in versions if 
Version(v) >= Version(restrictions[pkg_name])] + + if ( + originating_pkg_name in MINIMUM_VERSION_SPECIFIC_OVERRIDES + and pkg_name in MINIMUM_VERSION_SPECIFIC_OVERRIDES[originating_pkg_name] + ): + versions = [ + v + for v in versions + if Version(v) >= Version(MINIMUM_VERSION_SPECIFIC_OVERRIDES[originating_pkg_name][pkg_name]) + ] + + if pkg_name in MAXIMUM_VERSION_GENERIC_OVERRIDES: + versions = [v for v in versions if Version(v) <= Version(MAXIMUM_VERSION_GENERIC_OVERRIDES[pkg_name])] + + for platform_bound, restrictions in PLATFORM_SPECIFIC_MAXIMUM_OVERRIDES.items(): + if compare_python_version(platform_bound) and pkg_name in restrictions: + versions = [v for v in versions if Version(v) <= Version(restrictions[pkg_name])] + + if ( + originating_pkg_name in MAXIMUM_VERSION_SPECIFIC_OVERRIDES + and pkg_name in MAXIMUM_VERSION_SPECIFIC_OVERRIDES[originating_pkg_name] + ): + versions = [ + v + for v in versions + if Version(v) <= Version(MAXIMUM_VERSION_SPECIFIC_OVERRIDES[originating_pkg_name][pkg_name]) + ] + + return versions def process_requirement(req: str, dependency_type: str, orig_pkg_name: str) -> str: - """Determine the matching version for a requirement based on dependency type.""" + """Determine the matching version for a requirement based on dependency type.""" - requirement = parse_require(req) - pkg_name = requirement.name - spec = requirement.specifier if len(requirement.specifier) else None + requirement = parse_require(req) + pkg_name = requirement.name + spec = requirement.specifier if len(requirement.specifier) else None - if not (requirement.marker is None or requirement.marker.evaluate()): - logger.info( - "Skipping requirement %r. Environment marker %r does not apply to current environment.", - req, - str(requirement.marker), - ) - return "" + if not (requirement.marker is None or requirement.marker.evaluate()): + logger.info( + "Skipping requirement %r. 
Environment marker %r does not apply to current environment.", + req, + str(requirement.marker), + ) + return "" - client = PyPIClient() - versions = [str(v) for v in client.get_ordered_versions(pkg_name, True)] - logger.info("Versions available on PyPI for %s: %s", pkg_name, versions) + client = PyPIClient() + versions = [str(v) for v in client.get_ordered_versions(pkg_name, True)] + logger.info("Versions available on PyPI for %s: %s", pkg_name, versions) - versions = process_bounded_versions(orig_pkg_name, pkg_name, versions) + versions = process_bounded_versions(orig_pkg_name, pkg_name, versions) - if dependency_type == "Latest": - versions.reverse() + if dependency_type == "Latest": + versions.reverse() - for version in versions: - if spec is None or version in spec: - logger.info( - "Found %s version %s that matches specifier %s", - dependency_type, - version, - spec, - ) - return pkg_name + "==" + version + for version in versions: + if spec is None or version in spec: + logger.info( + "Found %s version %s that matches specifier %s", + dependency_type, + version, + spec, + ) + return pkg_name + "==" + version - logger.error("No version is found on PyPI for package %s that matches specifier %s", pkg_name, spec) - return "" + logger.error("No version is found on PyPI for package %s that matches specifier %s", pkg_name, spec) + return "" def check_req_against_exclusion(req: str, req_to_exclude: str) -> bool: - """Return ``True`` if the dev requirement matches the package slated for exclusion.""" + """Return ``True`` if the dev requirement matches the package slated for exclusion.""" - req_id = "" - for char in req: - if re.match(r"[A-Za-z0-9_-]", char): - req_id += char - else: - break + req_id = "" + for char in req: + if re.match(r"[A-Za-z0-9_-]", char): + req_id += char + else: + break - return req_id == req_to_exclude + return req_id == req_to_exclude def filter_dev_requirements( - package_directory: str, - released_packages: List[str], - temp_dir: str, - additional_filter_fn: Optional[Callable[[str, List[str], List[Requirement]], List[str]]] = None, + package_directory: str, + released_packages: List[str], + temp_dir: str, + additional_filter_fn: Optional[Callable[[str, List[str], List[Requirement]], List[str]]] = None, ) -> str: - """Filter dev requirements to avoid reinstalling packages we just resolved.""" + """Filter dev requirements to avoid reinstalling packages we just resolved.""" - dev_req_path = os.path.join(package_directory, DEV_REQ_FILE) - with open(dev_req_path, "r", encoding="utf-8") as dev_req_file: - requirements = dev_req_file.readlines() + dev_req_path = os.path.join(package_directory, DEV_REQ_FILE) + with open(dev_req_path, "r", encoding="utf-8") as dev_req_file: + requirements = dev_req_file.readlines() - released_packages_parsed = [parse_require(p) for p in released_packages] - released_package_names = [p.name for p in released_packages_parsed] + released_packages_parsed = [parse_require(p) for p in released_packages] + released_package_names = [p.name for p in released_packages_parsed] - prebuilt_dev_reqs = [os.path.basename(req.replace("\n", "")) for req in requirements if os.path.sep in req] - req_to_exclude = [ - req for req in prebuilt_dev_reqs if req.split("-")[0].replace("_", "-") in released_package_names - ] - req_to_exclude.extend(released_package_names) + prebuilt_dev_reqs = [os.path.basename(req.replace("\n", "")) for req in requirements if os.path.sep in req] + req_to_exclude = [req for req in prebuilt_dev_reqs if req.split("-")[0].replace("_", 
"-") in released_package_names] + req_to_exclude.extend(released_package_names) - filtered_req = [ - req - for req in requirements - if os.path.basename(req.replace("\n", "")) not in req_to_exclude - and not any(check_req_against_exclusion(req, item) for item in req_to_exclude) - ] + filtered_req = [ + req + for req in requirements + if os.path.basename(req.replace("\n", "")) not in req_to_exclude + and not any(check_req_against_exclusion(req, item) for item in req_to_exclude) + ] - if additional_filter_fn: - filtered_req = additional_filter_fn(package_directory, filtered_req, released_packages_parsed) + if additional_filter_fn: + filtered_req = additional_filter_fn(package_directory, filtered_req, released_packages_parsed) - logger.info("Filtered dev requirements: %s", filtered_req) + logger.info("Filtered dev requirements: %s", filtered_req) - new_dev_req_path = "" - if filtered_req: - new_dev_req_path = os.path.join(temp_dir, NEW_DEV_REQ_FILE) - with open(new_dev_req_path, "w", encoding="utf-8") as dev_req_file: - dev_req_file.writelines(line if line.endswith("\n") else line + "\n" for line in filtered_req) + new_dev_req_path = "" + if filtered_req: + new_dev_req_path = os.path.join(temp_dir, NEW_DEV_REQ_FILE) + with open(new_dev_req_path, "w", encoding="utf-8") as dev_req_file: + dev_req_file.writelines(line if line.endswith("\n") else line + "\n" for line in filtered_req) - return new_dev_req_path + return new_dev_req_path def install_packages(packages: List[str], req_file: str, python_executable: str) -> None: - """Install resolved packages (and optionally a requirements file) into the target environment.""" + """Install resolved packages (and optionally a requirements file) into the target environment.""" - python_exe = python_executable or sys.executable - commands = get_pip_command(python_exe) - commands.append("install") + python_exe = python_executable or sys.executable + commands = get_pip_command(python_exe) + commands.append("install") - if commands[0] == "uv": - commands.extend(["--python", python_exe]) + if commands[0] == "uv": + commands.extend(["--python", python_exe]) - if packages: - commands.extend(packages) + if packages: + commands.extend(packages) - if req_file: - commands.extend(["-r", req_file]) + if req_file: + commands.extend(["-r", req_file]) - logger.info("Installing packages. Command: %s", commands) - subprocess.check_call(commands) + logger.info("Installing packages. 
Command: %s", commands) + subprocess.check_call(commands) From f853d8266e79d7931c026bd47e66e4d055c44f4b Mon Sep 17 00:00:00 2001 From: Scott Beddall Date: Fri, 12 Dec 2025 20:04:07 +0000 Subject: [PATCH 27/76] fix reference --- .github/workflows/azure-sdk-tools.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/azure-sdk-tools.yml b/.github/workflows/azure-sdk-tools.yml index d3e3d3ccb962..00b6d99ba530 100644 --- a/.github/workflows/azure-sdk-tools.yml +++ b/.github/workflows/azure-sdk-tools.yml @@ -92,7 +92,7 @@ jobs: - name: Run all discovered checks against azure-template using uv as package manager run: | - sdk_build azure-template -d $(pwd)/wheels --buildid 20250101.1 + sdk_build azure-template -d $(pwd)/wheels --build_id 20250101.1 python eng/scripts/dispatch_checks.py --checks "$AZPYSDK_CHECKS" --wheel_dir $(pwd)/wheels azure-template shell: bash env: From d985c3b590e02528daf90f060191c17ec7c87dde Mon Sep 17 00:00:00 2001 From: jennypng <63012604+JennyPng@users.noreply.github.com> Date: Wed, 17 Dec 2025 10:59:43 -0800 Subject: [PATCH 28/76] use InstallAndTest in devtest --- eng/tools/azure-sdk-tools/azpysdk/devtest.py | 86 ++----------------- .../azpysdk/install_and_test.py | 6 +- .../azure-sdk-tools/azpysdk/whl_no_aio.py | 2 +- 3 files changed, 12 insertions(+), 82 deletions(-) diff --git a/eng/tools/azure-sdk-tools/azpysdk/devtest.py b/eng/tools/azure-sdk-tools/azpysdk/devtest.py index a45614d77a3f..28e88622955f 100644 --- a/eng/tools/azure-sdk-tools/azpysdk/devtest.py +++ b/eng/tools/azure-sdk-tools/azpysdk/devtest.py @@ -17,6 +17,8 @@ from ci_tools.variables import discover_repo_root, set_envvar_defaults from ci_tools.logging import logger +from .install_and_test import InstallAndTest + REPO_ROOT = discover_repo_root() common_task_path = os.path.abspath(os.path.join(REPO_ROOT, "scripts", "devops_tasks")) sys.path.append(common_task_path) @@ -122,9 +124,9 @@ def install_dev_build_packages(executable: str, pkg_name_to_exclude: str, workin install_packages(executable, azure_pkgs, working_directory) -class devtest(Check): +class devtest(InstallAndTest): def __init__(self) -> None: - super().__init__() + super().__init__(package_type="sdist", proxy_url="http://localhost:5002", display_name="devtest") def register( self, subparsers: "argparse._SubParsersAction", parent_parsers: Optional[List[argparse.ArgumentParser]] = None @@ -143,79 +145,7 @@ def register( help="Additional arguments forwarded to pytest.", ) - def run(self, args: argparse.Namespace) -> int: - """Run the devtest check command.""" - logger.info("Running devtest check...") - - set_envvar_defaults({"PROXY_URL": "http://localhost:5002"}) - targeted = self.get_targeted_directories(args) - - results: List[int] = [] - - for parsed in targeted: - package_dir = parsed.folder - package_name = parsed.name - executable, staging_directory = self.get_executable(args.isolate, args.command, sys.executable, package_dir) - logger.info(f"Processing {package_name} for devtest check") - - # install dependencies - try: - self.install_dev_reqs(executable, args, package_dir) - except CalledProcessError as e: - logger.error(f"Failed to install dev requirements: {e}") - results.append(1) - continue - - try: - create_package_and_install( - distribution_directory=staging_directory, - target_setup=package_dir, - skip_install=False, - cache_dir=None, - work_dir=staging_directory, - force_create=False, - package_type="sdist", - pre_download_disabled=False, - python_executable=executable, - ) - except 
CalledProcessError as e: - logger.error(f"Failed to create and install package {package_name}: {e}") - results.append(1) - continue - - if os.path.exists(TEST_TOOLS_REQUIREMENTS): - try: - install_into_venv(executable, ["-r", TEST_TOOLS_REQUIREMENTS], package_dir) - except Exception as e: - logger.error(f"Failed to install test tools requirements: {e}") - results.append(1) - continue - else: - logger.warning(f"Test tools requirements file not found at {TEST_TOOLS_REQUIREMENTS}.") - - try: - install_dev_build_packages(executable, package_name, package_dir) - except Exception as e: - logger.error(f"Failed to install dev build packages: {e}") - results.append(1) - continue - - pytest_args = self._build_pytest_args(package_dir, args) - - pytest_result = self.run_venv_command( - executable, ["-m", "pytest", *pytest_args], cwd=package_dir, immediately_dump=True - ) - - if pytest_result.returncode != 0: - if pytest_result.returncode == 5 and is_error_code_5_allowed(package_dir, package_name): - logger.info( - "pytest exited with code 5 for %s, which is allowed for management or opt-out packages.", - package_name, - ) - # Align with tox: skip coverage when tests are skipped entirely - continue - - logger.error(f"pytest failed for {package_name} with exit code {pytest_result.returncode}.") - results.append(pytest_result.returncode) - - return max(results) if results else 0 + def before_pytest( + self, executable: str, package_dir: str, package_name: str, staging_directory: str, args: argparse.Namespace + ) -> None: + install_dev_build_packages(executable, package_name, package_dir) diff --git a/eng/tools/azure-sdk-tools/azpysdk/install_and_test.py b/eng/tools/azure-sdk-tools/azpysdk/install_and_test.py index 123b5cfa1d6a..026db9b06b60 100644 --- a/eng/tools/azure-sdk-tools/azpysdk/install_and_test.py +++ b/eng/tools/azure-sdk-tools/azpysdk/install_and_test.py @@ -87,7 +87,7 @@ def run(self, args: argparse.Namespace) -> int: continue try: - self.before_pytest(executable, package_dir, staging_directory, args) + self.before_pytest(executable, package_dir, package_name, staging_directory, args) except CalledProcessError as exc: logger.error(f"Pre-pytest hook failed for {package_name}: {exc}") results.append(exc.returncode or 1) @@ -151,9 +151,9 @@ def after_dependencies_installed( return None def before_pytest( - self, executable: str, package_dir: str, staging_directory: str, args: argparse.Namespace + self, executable: str, package_dir: str, package_name: str, staging_directory: str, args: argparse.Namespace ) -> None: - del executable, package_dir, staging_directory, args + del executable, package_dir, package_name, staging_directory, args return None def _install_common_requirements(self, executable: str, package_dir: str) -> None: diff --git a/eng/tools/azure-sdk-tools/azpysdk/whl_no_aio.py b/eng/tools/azure-sdk-tools/azpysdk/whl_no_aio.py index 1c554fc1fd2f..d8531b2f96ee 100644 --- a/eng/tools/azure-sdk-tools/azpysdk/whl_no_aio.py +++ b/eng/tools/azure-sdk-tools/azpysdk/whl_no_aio.py @@ -28,7 +28,7 @@ def register( ) def before_pytest( - self, executable: str, package_dir: str, staging_directory: str, args: argparse.Namespace + self, executable: str, package_dir: str, package_name: str, staging_directory: str, args: argparse.Namespace ) -> None: uninstall_cmd = ["-m", "pip", "uninstall", "aiohttp", "--yes"] result = self.run_venv_command(executable, uninstall_cmd, cwd=package_dir) From 0c96ec320ae2ae2511a5b56e249e3697bf137f52 Mon Sep 17 00:00:00 2001 From: jennypng 
<63012604+JennyPng@users.noreply.github.com>
Date: Wed, 17 Dec 2025 11:02:42 -0800
Subject: [PATCH 29/76] remove unused imports from devtest

---
 eng/tools/azure-sdk-tools/azpysdk/devtest.py | 6 +-----
 1 file changed, 1 insertion(+), 5 deletions(-)

diff --git a/eng/tools/azure-sdk-tools/azpysdk/devtest.py b/eng/tools/azure-sdk-tools/azpysdk/devtest.py
index 28e88622955f..662e0ee5e5f2 100644
--- a/eng/tools/azure-sdk-tools/azpysdk/devtest.py
+++ b/eng/tools/azure-sdk-tools/azpysdk/devtest.py
@@ -1,20 +1,16 @@
 import argparse
-from subprocess import CalledProcessError
 import sys
 import os
 import glob
 
 from typing import Optional, List
 
-from .Check import Check
 from ci_tools.functions import (
     install_into_venv,
     uninstall_from_venv,
-    is_error_code_5_allowed,
     discover_targeted_packages,
 )
-from ci_tools.scenario.generation import create_package_and_install
-from ci_tools.variables import discover_repo_root, set_envvar_defaults
+from ci_tools.variables import discover_repo_root
 from ci_tools.logging import logger
 
 from .install_and_test import InstallAndTest

From 10a6d15dd84f4fa4c49f257a40a81cf2a410e6c5 Mon Sep 17 00:00:00 2001
From: jenny <63012604+JennyPng@users.noreply.github.com>
Date: Fri, 19 Dec 2025 18:05:52 -0800
Subject: [PATCH 30/76] refactor install and test (#44482)

* refactor install and test

* refactored optional and I think it's working

* minor clean

* optional actually working properly

* bug fix

* bug fix

* minor exception handling revisions
---
 .../azpysdk/install_and_test.py               | 144 +++++++++++-------
 eng/tools/azure-sdk-tools/azpysdk/optional.py | 102 +++++--------
 2 files changed, 125 insertions(+), 121 deletions(-)

diff --git a/eng/tools/azure-sdk-tools/azpysdk/install_and_test.py b/eng/tools/azure-sdk-tools/azpysdk/install_and_test.py
index 026db9b06b60..22c451352149 100644
--- a/eng/tools/azure-sdk-tools/azpysdk/install_and_test.py
+++ b/eng/tools/azure-sdk-tools/azpysdk/install_and_test.py
@@ -59,31 +59,11 @@ def run(self, args: argparse.Namespace) -> int:
             executable, staging_directory = self.get_executable(args.isolate, args.command, sys.executable, package_dir)
             logger.info(f"Processing {package_name} using interpreter {executable}")
 
-            try:
-                self._install_common_requirements(executable, package_dir)
-                if self.should_install_dev_requirements():
-                    self.install_dev_reqs(executable, args, package_dir)
-                self.after_dependencies_installed(executable, package_dir, staging_directory, args)
-            except CalledProcessError as exc:
-                logger.error(f"Failed to prepare dependencies for {package_name}: {exc}")
-                results.append(exc.returncode)
-                continue
-
-            try:
-                create_package_and_install(
-                    distribution_directory=staging_directory,
-                    target_setup=package_dir,
-                    skip_install=False,
-                    cache_dir=None,
-                    work_dir=staging_directory,
-                    force_create=False,
-                    package_type=self.package_type,
-                    pre_download_disabled=False,
-                    python_executable=executable,
-                )
-            except CalledProcessError as exc:
-                logger.error(f"Failed to build/install {self.package_type} for {package_name}: {exc}")
-                results.append(1)
+            install_result = self.install_all_requiremenmts(
+                executable, staging_directory, package_name, package_dir, args
+            )
+            if install_result != 0:
+                results.append(install_result)
                 continue
 
             try:
@@ -94,47 +74,93 @@ def run(self, args: argparse.Namespace) -> int:
                 continue
 
             pytest_args = self._build_pytest_args(package_dir, args)
-            pytest_command = ["-m", "pytest", *pytest_args]
-            pytest_result = self.run_venv_command(
-                executable, pytest_command, cwd=staging_directory, immediately_dump=True
-            )
-
-            if 
pytest_result.returncode != 0: - if pytest_result.returncode == 5 and is_error_code_5_allowed(package_dir, package_name): - logger.info( - "pytest exited with code 5 for %s, which is allowed for management or opt-out packages.", - package_name, - ) - # Align with tox: skip coverage when tests are skipped entirely - continue - else: - results.append(pytest_result.returncode) - logger.error(f"pytest failed for {package_name} with exit code {pytest_result.returncode}.") - continue + pytest_result = self.run_pytest(executable, staging_directory, package_dir, package_name, pytest_args) + if pytest_result != 0: + results.append(pytest_result) + continue if not self.coverage_enabled: continue - coverage_command = [ - os.path.join(REPO_ROOT, "eng/tox/run_coverage.py"), - "-t", - package_dir, - "-r", - REPO_ROOT, - ] - coverage_result = self.run_venv_command(executable, coverage_command, cwd=package_dir) - if coverage_result.returncode != 0: - logger.error( - f"Coverage generation failed for {package_name} with exit code {coverage_result.returncode}." - ) - if coverage_result.stdout: - logger.error(coverage_result.stdout) - if coverage_result.stderr: - logger.error(coverage_result.stderr) - results.append(coverage_result.returncode) + coverage_result = self.check_coverage(executable, package_dir, package_name) + if coverage_result != 0: + results.append(coverage_result) return max(results) if results else 0 + def check_coverage(self, executable: str, package_dir: str, package_name: str) -> int: + coverage_command = [ + os.path.join(REPO_ROOT, "eng/tox/run_coverage.py"), + "-t", + package_dir, + "-r", + REPO_ROOT, + ] + coverage_result = self.run_venv_command(executable, coverage_command, cwd=package_dir) + if coverage_result.returncode != 0: + logger.error(f"Coverage generation failed for {package_name} with exit code {coverage_result.returncode}.") + if coverage_result.stdout: + logger.error(coverage_result.stdout) + if coverage_result.stderr: + logger.error(coverage_result.stderr) + return coverage_result.returncode + return 0 + + def run_pytest( + self, + executable: str, + staging_directory: str, + package_dir: str, + package_name: str, + pytest_args: List[str], + cwd: Optional[str] = None, + ) -> int: + pytest_command = ["-m", "pytest", *pytest_args] + pytest_result = self.run_venv_command( + executable, pytest_command, cwd=(cwd or staging_directory), immediately_dump=True + ) + if pytest_result.returncode != 0: + if pytest_result.returncode == 5 and is_error_code_5_allowed(package_dir, package_name): + logger.info( + "pytest exited with code 5 for %s, which is allowed for management or opt-out packages.", + package_name, + ) + # Align with tox: skip coverage when tests are skipped entirely + return 0 + else: + logger.error(f"pytest failed for {package_name} with exit code {pytest_result.returncode}.") + return pytest_result.returncode + return 0 + + def install_all_requiremenmts( + self, executable: str, staging_directory: str, package_name: str, package_dir: str, args: argparse.Namespace + ) -> int: + try: + self._install_common_requirements(executable, package_dir) + if self.should_install_dev_requirements(): + self.install_dev_reqs(executable, args, package_dir) + self.after_dependencies_installed(executable, package_dir, staging_directory, args) + except CalledProcessError as exc: + logger.error(f"Failed to prepare dependencies for {package_name}: {exc}") + return exc.returncode or 1 + + try: + create_package_and_install( + distribution_directory=staging_directory, + 
target_setup=package_dir, + skip_install=False, + cache_dir=None, + work_dir=staging_directory, + force_create=False, + package_type=self.package_type, + pre_download_disabled=False, + python_executable=executable, + ) + except CalledProcessError as exc: + logger.error(f"Failed to build/install {self.package_type} for {package_name}: {exc}") + exit(1) + return 0 + def get_env_defaults(self) -> Dict[str, str]: defaults: Dict[str, str] = {} if self.proxy_url: diff --git a/eng/tools/azure-sdk-tools/azpysdk/optional.py b/eng/tools/azure-sdk-tools/azpysdk/optional.py index 3a873d234cca..16fb85b7889b 100644 --- a/eng/tools/azure-sdk-tools/azpysdk/optional.py +++ b/eng/tools/azure-sdk-tools/azpysdk/optional.py @@ -5,24 +5,22 @@ from typing import Optional, List -from .Check import Check +from .install_and_test import InstallAndTest from ci_tools.functions import ( install_into_venv, uninstall_from_venv, - is_error_code_5_allowed, ) -from ci_tools.scenario.generation import create_package_and_install, prepare_environment -from ci_tools.variables import discover_repo_root, in_ci, set_envvar_defaults -from ci_tools.environment_exclusions import is_check_enabled +from ci_tools.scenario.generation import prepare_environment +from ci_tools.variables import discover_repo_root, set_envvar_defaults from ci_tools.parsing import get_config_setting from ci_tools.logging import logger REPO_ROOT = discover_repo_root() -class optional(Check): +class optional(InstallAndTest): def __init__(self) -> None: - super().__init__() + super().__init__(package_type="sdist", proxy_url="http://localhost:5004", display_name="optional") def register( self, subparsers: "argparse._SubParsersAction", parent_parsers: Optional[List[argparse.ArgumentParser]] = None @@ -48,8 +46,14 @@ def run(self, args: argparse.Namespace) -> int: """Run the optional check command.""" logger.info("Running optional check...") - set_envvar_defaults({"PROXY_URL": "http://localhost:5004"}) + env_defaults = self.get_env_defaults() + if env_defaults: + set_envvar_defaults(env_defaults) + targeted = self.get_targeted_directories(args) + if not targeted: + logger.warning("No target packages discovered for optional check.") + return 0 results: List[int] = [] @@ -57,22 +61,14 @@ def run(self, args: argparse.Namespace) -> int: package_dir = parsed.folder package_name = parsed.name executable, staging_directory = self.get_executable(args.isolate, args.command, sys.executable, package_dir) - logger.info(f"Processing {package_name} for optional check") - - if in_ci(): - if not is_check_enabled(package_dir, "optional", False): - logger.info(f"Package {package_name} opts-out of optional check.") - continue - - try: - self.install_dev_reqs(executable, args, package_dir) - except CalledProcessError as exc: - logger.error(f"Failed to install dependencies for {package_name}: {exc}") - results.append(exc.returncode) - continue + logger.info(f"Processing {package_name} using interpreter {executable}") try: - self.prepare_and_test_optional(package_name, package_dir, staging_directory, args.optional) + result = self.prepare_and_test_optional( + package_name, package_dir, staging_directory, args.optional, args + ) + if result != 0: + results.append(result) except Exception as e: logger.error(f"Optional check for package {package_name} failed with exception: {e}") results.append(1) @@ -83,16 +79,19 @@ def run(self, args: argparse.Namespace) -> int: # TODO copying from generation.py, remove old code later # TODO remove pytest() function from ci_tools.functions as it was only 
used in the old version of this logic def prepare_and_test_optional( - self, package_name: str, package_dir: str, temp_dir: str, target_env_name: str - ) -> None: + self, package_name: str, package_dir: str, temp_dir: str, target_env_name: str, args: argparse.Namespace + ) -> int: """ Prepare and test the optional environment for the given package. """ optional_configs = get_config_setting(package_dir, "optional") + if not isinstance(optional_configs, list): + optional_configs = [] + if len(optional_configs) == 0: logger.info(f"No optional environments detected in pyproject.toml within {package_dir}.") - exit(0) + return 0 config_results = [] @@ -109,26 +108,18 @@ def prepare_and_test_optional( environment_exe = prepare_environment(package_dir, temp_dir, env_name) - create_package_and_install( - distribution_directory=temp_dir, - target_setup=package_dir, - skip_install=False, - cache_dir=None, - work_dir=temp_dir, - force_create=False, - package_type="sdist", - pre_download_disabled=False, - python_executable=environment_exe, - ) - dev_reqs = os.path.join(package_dir, "dev_requirements.txt") - test_tools = os.path.join(REPO_ROOT, "eng", "test_tools.txt") - - # install the dev requirements and test_tools requirements files to ensure tests can run + # install package and testing requirements try: - install_into_venv(environment_exe, ["-r", dev_reqs, "-r", test_tools], package_dir) + install_result = self.install_all_requiremenmts( + environment_exe, temp_dir, package_name, package_dir, args + ) + if install_result != 0: + logger.error(f"Failed to install base requirements for {package_name} in optional env {env_name}.") + config_results.append(False) + break except CalledProcessError as exc: logger.error( - f"Unable to complete installation of dev_requirements.txt and/or test_tools.txt for {package_name}, check command output above." + f"Failed to install base requirements for {package_name} in optional env {env_name}: {exc}" ) config_results.append(False) break @@ -181,30 +172,16 @@ def prepare_and_test_optional( logger.info(f"Invoking tests for package {package_name} and optional environment {env_name}") - pytest_command = ["-m", "pytest", *pytest_args] - pytest_result = self.run_venv_command( - environment_exe, pytest_command, cwd=package_dir, immediately_dump=True - ) - - if pytest_result.returncode != 0: - if pytest_result.returncode == 5 and is_error_code_5_allowed(package_dir, package_name): - logger.info( - "pytest exited with code 5 for %s, which is allowed for management or opt-out packages.", - package_name, - ) - # Align with tox: skip coverage when tests are skipped entirely - continue - logger.error( - f"pytest failed for {package_name} and optional environment {env_name} with exit code {pytest_result.returncode}." + try: + pytest_result = self.run_pytest( + environment_exe, temp_dir, package_dir, package_name, pytest_args, cwd=package_dir ) + config_results.append(True if pytest_result == 0 else False) + except CalledProcessError as exc: config_results.append(False) - else: - logger.info(f"pytest succeeded for {package_name} and optional environment {env_name}.") - config_results.append(True) if all(config_results): logger.info(f"All optional environment(s) for {package_name} completed successfully.") - exit(0) else: for i, config in enumerate(optional_configs): if i >= len(config_results): @@ -214,4 +191,5 @@ def prepare_and_test_optional( logger.error( f"Optional environment {config_name} for {package_name} completed with non-zero exit-code. Check test results above." 
                )
-        exit(1)
+            return 1
+        return 0

From f1494acb304ca3594ff08c238ff86be073e5fb8e Mon Sep 17 00:00:00 2001
From: Scott Beddall
Date: Sat, 20 Dec 2025 02:36:19 +0000
Subject: [PATCH 31/76] undo dev changes now that all necessary checks have
 migrated

---
 eng/pipelines/templates/steps/build-test.yml | 75 ++++++++++----------
 scripts/devops_tasks/set_tox_environment.py  | 14 ++--
 2 files changed, 43 insertions(+), 46 deletions(-)

diff --git a/eng/pipelines/templates/steps/build-test.yml b/eng/pipelines/templates/steps/build-test.yml
index 671013a76892..954f3f9163a9 100644
--- a/eng/pipelines/templates/steps/build-test.yml
+++ b/eng/pipelines/templates/steps/build-test.yml
@@ -146,44 +146,43 @@ steps:
     displayName: Report Coverage
     condition: and(succeeded(), ${{ parameters.RunCoverage }})
 
-  # re-enable after migrating `samples`
-  # - ${{ if eq('true', parameters.UseFederatedAuth) }}:
-  #   - task: AzurePowerShell@5
-  #     displayName: Test Samples (AzurePowerShell@5)
-  #     condition: and(succeeded(), eq(variables['TestSamples'], 'true'))
-  #     env:
-  #       SYSTEM_ACCESSTOKEN: $(System.AccessToken)
-  #       # Enable samples tests that use DefaultAzureCredential to load the federated pipeline credential
-  #       AZURE_POD_IDENTITY_AUTHORITY_HOST: 'https://FakeAuthorityHost'
-  #       ${{ insert }}: ${{ parameters.EnvVars }}
-  #     inputs:
-  #       azureSubscription: azure-sdk-tests-public
-  #       azurePowerShellVersion: LatestVersion
-  #       pwsh: true
-  #       ScriptType: InlineScript
-  #       Inline: |
-  #         $account = (Get-AzContext).Account;
-  #         $env:AZURESUBSCRIPTION_CLIENT_ID = $account.Id;
-  #         $env:AZURESUBSCRIPTION_TENANT_ID = $account.Tenants;
-
-  #         Write-Host (Get-Command python).Source
-
-  #         python eng/scripts/dispatch_checks.py "$(TargetingString)" `
-  #           --service="${{ parameters.ServiceDirectory }}" `
-  #           --checks="samples"
-
-  #         Write-Host "Last exit code: $LASTEXITCODE";
-  #         exit $LASTEXITCODE;
-  # - ${{ else }}:
-  #   - pwsh: |
-  #       Write-Host (Get-Command python).Source
-  #       python eng/scripts/dispatch_checks.py "$(TargetingString)" `
-  #         --service="${{ parameters.ServiceDirectory }}" `
-  #         --checks="samples"
-  #       exit $LASTEXITCODE;
-  #     env: ${{ parameters.EnvVars }}
-  #     displayName: 'Test Samples'
-  #     condition: and(succeeded(), eq(variables['TestSamples'], 'true'))
+  - ${{ if eq('true', parameters.UseFederatedAuth) }}:
+    - task: AzurePowerShell@5
+      displayName: Test Samples (AzurePowerShell@5)
+      condition: and(succeeded(), eq(variables['TestSamples'], 'true'))
+      env:
+        SYSTEM_ACCESSTOKEN: $(System.AccessToken)
+        # Enable samples tests that use DefaultAzureCredential to load the federated pipeline credential
+        AZURE_POD_IDENTITY_AUTHORITY_HOST: 'https://FakeAuthorityHost'
+        ${{ insert }}: ${{ parameters.EnvVars }}
+      inputs:
+        azureSubscription: azure-sdk-tests-public
+        azurePowerShellVersion: LatestVersion
+        pwsh: true
+        ScriptType: InlineScript
+        Inline: |
+          $account = (Get-AzContext).Account;
+          $env:AZURESUBSCRIPTION_CLIENT_ID = $account.Id;
+          $env:AZURESUBSCRIPTION_TENANT_ID = $account.Tenants;
+
+          Write-Host (Get-Command python).Source
+
+          python eng/scripts/dispatch_checks.py "$(TargetingString)" `
+            --service="${{ parameters.ServiceDirectory }}" `
+            --checks="samples"
+
+          Write-Host "Last exit code: $LASTEXITCODE";
+          exit $LASTEXITCODE;
+  - ${{ else }}:
+    - pwsh: |
+        Write-Host (Get-Command python).Source
+        python eng/scripts/dispatch_checks.py "$(TargetingString)" `
+          --service="${{ parameters.ServiceDirectory }}" `
+          --checks="samples"
+        exit $LASTEXITCODE;
+      env: ${{ parameters.EnvVars }}
+      displayName: 'Test Samples'
+      condition: 
and(succeeded(), eq(variables['TestSamples'], 'true')) - task: PublishTestResults@2 condition: always() diff --git a/scripts/devops_tasks/set_tox_environment.py b/scripts/devops_tasks/set_tox_environment.py index dd879c486f50..2f930233daf4 100644 --- a/scripts/devops_tasks/set_tox_environment.py +++ b/scripts/devops_tasks/set_tox_environment.py @@ -20,8 +20,7 @@ "mindependency", "whl_no_aio", ] -# this branch is checking only whl for now. rest will follow as they migrate -PR_BUILD_SET = ["whl"] #, "sdist", "mindependency" +PR_BUILD_SET = ["whl", "sdist", "mindependency"] def resolve_devops_variable(var_value: str) -> List[str]: @@ -43,7 +42,7 @@ def remove_unsupported_values(selected_set: List[str], unsupported_values: List[ selected_set.remove(unsupported_tox_env) -def process_ci_skips(glob_string: str, service: str ) -> None: +def process_ci_skips(glob_string: str, service: str) -> None: checks_with_global_skip = ["pylint", "verifywhl", "verifysdist" "bandit", "mypy", "pyright", "verifytypes"] root_dir = os.path.abspath(os.path.join(os.path.abspath(__file__), "..", "..", "..")) @@ -65,12 +64,11 @@ def process_ci_skips(glob_string: str, service: str ) -> None: all_packages = set([os.path.basename(pkg) for pkg in targeted_packages]) set_ci_variable(f"Skip.{check[0].upper()}{check[1:]}", "true") output_ci_warning( - f"All targeted packages {all_packages} skip the {check} check. Omitting step from build.", - "set_tox_environment.py", + f"All targeted packages {all_packages} skip the {check} check. Omitting step from build.", + "set_tox_environment.py", ) - if __name__ == "__main__": parser = argparse.ArgumentParser( description="This script is used to resolve a set of arguments (that correspond to devops runtime variables) and determine which tox environments should be run for the current job. " @@ -93,14 +91,14 @@ def process_ci_skips(glob_string: str, service: str ) -> None: "-o", "--override", dest="override_set", - help="If you have a set of tox environments that should override the defaults, provide it here. In CI this is runtime variable $(Run.ToxCustomEnvs). EG: \"whl,sdist\".", + help='If you have a set of tox environments that should override the defaults, provide it here. In CI this is runtime variable $(Run.ToxCustomEnvs). EG: "whl,sdist".', ) parser.add_argument( "-u", "--unsupported", dest="unsupported", - help="A list of unsupported environments. EG: \"pylint,sdist\"", + help='A list of unsupported environments. 
EG: "pylint,sdist"', ) parser.add_argument( From bc66875b9756bed039ba3e534cb34c61c5838043 Mon Sep 17 00:00:00 2001 From: Scott Beddall <45376673+scbedd@users.noreply.github.com> Date: Fri, 16 Jan 2026 11:58:06 -0800 Subject: [PATCH 32/76] Apply suggestion from @Copilot Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> --- eng/tools/azure-sdk-tools/azpysdk/install_and_test.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/eng/tools/azure-sdk-tools/azpysdk/install_and_test.py b/eng/tools/azure-sdk-tools/azpysdk/install_and_test.py index 22c451352149..b73a8c4ef59f 100644 --- a/eng/tools/azure-sdk-tools/azpysdk/install_and_test.py +++ b/eng/tools/azure-sdk-tools/azpysdk/install_and_test.py @@ -59,7 +59,7 @@ def run(self, args: argparse.Namespace) -> int: executable, staging_directory = self.get_executable(args.isolate, args.command, sys.executable, package_dir) logger.info(f"Processing {package_name} using interpreter {executable}") - install_result = self.install_all_requiremenmts( + install_result = self.install_all_requirements( executable, staging_directory, package_name, package_dir, args ) if install_result != 0: From 92e9848343fbd9d8f43b29cb83aaa4d38e1c0954 Mon Sep 17 00:00:00 2001 From: Scott Beddall Date: Fri, 16 Jan 2026 20:04:31 +0000 Subject: [PATCH 33/76] fix typoed function name --- eng/tools/azure-sdk-tools/azpysdk/install_and_test.py | 4 ++-- eng/tools/azure-sdk-tools/azpysdk/optional.py | 2 +- scripts/devops_tasks/set_tox_environment.py | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/eng/tools/azure-sdk-tools/azpysdk/install_and_test.py b/eng/tools/azure-sdk-tools/azpysdk/install_and_test.py index b73a8c4ef59f..9f7859e88e14 100644 --- a/eng/tools/azure-sdk-tools/azpysdk/install_and_test.py +++ b/eng/tools/azure-sdk-tools/azpysdk/install_and_test.py @@ -132,7 +132,7 @@ def run_pytest( return pytest_result.returncode return 0 - def install_all_requiremenmts( + def install_all_requirements( self, executable: str, staging_directory: str, package_name: str, package_dir: str, args: argparse.Namespace ) -> int: try: @@ -158,7 +158,7 @@ def install_all_requiremenmts( ) except CalledProcessError as exc: logger.error(f"Failed to build/install {self.package_type} for {package_name}: {exc}") - exit(1) + return 1 return 0 def get_env_defaults(self) -> Dict[str, str]: diff --git a/eng/tools/azure-sdk-tools/azpysdk/optional.py b/eng/tools/azure-sdk-tools/azpysdk/optional.py index 16fb85b7889b..df04faa518a4 100644 --- a/eng/tools/azure-sdk-tools/azpysdk/optional.py +++ b/eng/tools/azure-sdk-tools/azpysdk/optional.py @@ -110,7 +110,7 @@ def prepare_and_test_optional( # install package and testing requirements try: - install_result = self.install_all_requiremenmts( + install_result = self.install_all_requirements( environment_exe, temp_dir, package_name, package_dir, args ) if install_result != 0: diff --git a/scripts/devops_tasks/set_tox_environment.py b/scripts/devops_tasks/set_tox_environment.py index 2f930233daf4..e776fd780f3d 100644 --- a/scripts/devops_tasks/set_tox_environment.py +++ b/scripts/devops_tasks/set_tox_environment.py @@ -43,7 +43,7 @@ def remove_unsupported_values(selected_set: List[str], unsupported_values: List[ def process_ci_skips(glob_string: str, service: str) -> None: - checks_with_global_skip = ["pylint", "verifywhl", "verifysdist" "bandit", "mypy", "pyright", "verifytypes"] + checks_with_global_skip = ["pylint", "verifywhl", "verifysdist", "bandit", "mypy", "pyright", "verifytypes"] root_dir = 
os.path.abspath(os.path.join(os.path.abspath(__file__), "..", "..", "..")) if service and service != "auto": From 376a3687f4377beea38c279f14b188713e0f4d03 Mon Sep 17 00:00:00 2001 From: Scott Beddall Date: Fri, 16 Jan 2026 20:31:49 +0000 Subject: [PATCH 34/76] remove build config from azure-sdk-tools and make it the default. it's just easier. --- eng/ci_tools.txt | 2 +- eng/dependency_tools.txt | 2 +- eng/pipelines/templates/jobs/ci.yml | 2 +- .../templates/stages/archetype-python-release.yml | 2 +- eng/pipelines/templates/steps/analyze.yml | 2 +- eng/pipelines/templates/steps/set-dev-build.yml | 2 +- eng/pipelines/trigger-ml-sample-pipeline.yml | 2 +- eng/regression_tools.txt | 2 +- eng/scripts/Language-Settings.ps1 | 6 +++--- eng/tools/azure-sdk-tools/azpysdk/Check.py | 2 +- eng/tools/azure-sdk-tools/pyproject.toml | 7 ++++++- .../dev_requirement_samples/relative_requirements.txt | 4 ++-- eng/tox/tox.ini | 2 +- 13 files changed, 21 insertions(+), 16 deletions(-) diff --git a/eng/ci_tools.txt b/eng/ci_tools.txt index 460c5e09f757..85779e81f7e9 100644 --- a/eng/ci_tools.txt +++ b/eng/ci_tools.txt @@ -30,4 +30,4 @@ urllib3==2.2.3 six==1.17.0 # local dev packages -./eng/tools/azure-sdk-tools[build] +./eng/tools/azure-sdk-tools diff --git a/eng/dependency_tools.txt b/eng/dependency_tools.txt index 737848994a6b..b879db0430f3 100644 --- a/eng/dependency_tools.txt +++ b/eng/dependency_tools.txt @@ -1,2 +1,2 @@ -../../../eng/tools/azure-sdk-tools[build] +../../../eng/tools/azure-sdk-tools aiohttp>=3.0; python_version >= '3.5' \ No newline at end of file diff --git a/eng/pipelines/templates/jobs/ci.yml b/eng/pipelines/templates/jobs/ci.yml index 1de53286d7ec..d55762978ac1 100644 --- a/eng/pipelines/templates/jobs/ci.yml +++ b/eng/pipelines/templates/jobs/ci.yml @@ -333,7 +333,7 @@ jobs: - pwsh: | $ErrorActionPreference = 'Stop' $PSNativeCommandUseErrorActionPreference = $true - $(PIP_EXE) install "./eng/tools/azure-sdk-tools[build]" + $(PIP_EXE) install "./eng/tools/azure-sdk-tools" displayName: 'Prep Environment' - task: PythonScript@0 displayName: 'Ensure service coverage' diff --git a/eng/pipelines/templates/stages/archetype-python-release.yml b/eng/pipelines/templates/stages/archetype-python-release.yml index 791452af113f..af224ffebe08 100644 --- a/eng/pipelines/templates/stages/archetype-python-release.yml +++ b/eng/pipelines/templates/stages/archetype-python-release.yml @@ -312,7 +312,7 @@ stages: - checkout: self - task: UsePythonVersion@0 - script: | - python -m pip install "./eng/tools/azure-sdk-tools[build]" + python -m pip install "./eng/tools/azure-sdk-tools" displayName: Install versioning tool dependencies - pwsh: | diff --git a/eng/pipelines/templates/steps/analyze.yml b/eng/pipelines/templates/steps/analyze.yml index 491b1a9e5148..21242a0bc0b5 100644 --- a/eng/pipelines/templates/steps/analyze.yml +++ b/eng/pipelines/templates/steps/analyze.yml @@ -48,7 +48,7 @@ steps: Condition: succeededOrFailed() - script: | - $(PIP_EXE) install "./eng/tools/azure-sdk-tools[build]" + $(PIP_EXE) install "./eng/tools/azure-sdk-tools" sdk_find_invalid_versions --always-succeed --service=${{parameters.ServiceDirectory}} displayName: Find Invalid Versions condition: succeededOrFailed() diff --git a/eng/pipelines/templates/steps/set-dev-build.yml b/eng/pipelines/templates/steps/set-dev-build.yml index 4030dc3e0195..4eaddf310ceb 100644 --- a/eng/pipelines/templates/steps/set-dev-build.yml +++ b/eng/pipelines/templates/steps/set-dev-build.yml @@ -9,7 +9,7 @@ steps: - template: 
/eng/common/pipelines/templates/steps/daily-dev-build-variable.yml - pwsh: | - $(PIP_EXE) install "eng/tools/azure-sdk-tools[build]" + $(PIP_EXE) install "eng/tools/azure-sdk-tools" sdk_set_dev_version "*" --build-id="$(Build.BuildNumber)" displayName: "Update package versions for dev build" condition: and(succeededOrFailed(), eq(variables['SetDevVersion'],'true'), ${{ parameters.Condition }}) diff --git a/eng/pipelines/trigger-ml-sample-pipeline.yml b/eng/pipelines/trigger-ml-sample-pipeline.yml index 6cc793c5e190..8187cda0644c 100644 --- a/eng/pipelines/trigger-ml-sample-pipeline.yml +++ b/eng/pipelines/trigger-ml-sample-pipeline.yml @@ -40,7 +40,7 @@ jobs: versionSpec: $(PythonVersion) - script: | - python -m pip install eng/tools/azure-sdk-tools[build] + python -m pip install eng/tools/azure-sdk-tools python -m pip install azure-identity python -m pip install azure-storage-blob displayName: 'Prep Environment' diff --git a/eng/regression_tools.txt b/eng/regression_tools.txt index ca6d669a1db5..b37ff2dab6a9 100644 --- a/eng/regression_tools.txt +++ b/eng/regression_tools.txt @@ -23,4 +23,4 @@ pytest-cov==4.0.0 coverage==7.2.5 # local dev packages -./eng/tools/azure-sdk-tools[build] +./eng/tools/azure-sdk-tools diff --git a/eng/scripts/Language-Settings.ps1 b/eng/scripts/Language-Settings.ps1 index d9832d0a852c..eb97ef225909 100644 --- a/eng/scripts/Language-Settings.ps1 +++ b/eng/scripts/Language-Settings.ps1 @@ -161,7 +161,7 @@ function Get-AllPackageInfoFromRepo ($serviceDirectory) $allPkgPropLines = $null try { - $pathToBuild = (Join-Path $RepoRoot "eng" "tools" "azure-sdk-tools[build]") + $pathToBuild = (Join-Path $RepoRoot "eng" "tools" "azure-sdk-tools") # Use ‘uv pip install’ if uv is on PATH, otherwise fall back to python -m pip if (Get-Command uv -ErrorAction SilentlyContinue) { Write-Host "Using uv pip install" @@ -419,9 +419,9 @@ function SetPackageVersion ($PackageName, $Version, $ServiceDirectory, $ReleaseD $ReleaseDate = Get-Date -Format "yyyy-MM-dd" } if (Get-Command uv -ErrorAction SilentlyContinue) { - uv pip install "$RepoRoot/eng/tools/azure-sdk-tools[build]" + uv pip install "$RepoRoot/eng/tools/azure-sdk-tools" } else { - python -m pip install "$RepoRoot/eng/tools/azure-sdk-tools[build]" -q -I + python -m pip install "$RepoRoot/eng/tools/azure-sdk-tools" -q -I } sdk_set_version --package-name $PackageName --new-version $Version ` --service $ServiceDirectory --release-date $ReleaseDate --replace-latest-entry-title $ReplaceLatestEntryTitle diff --git a/eng/tools/azure-sdk-tools/azpysdk/Check.py b/eng/tools/azure-sdk-tools/azpysdk/Check.py index c151c2f95aef..37207a324d31 100644 --- a/eng/tools/azure-sdk-tools/azpysdk/Check.py +++ b/eng/tools/azure-sdk-tools/azpysdk/Check.py @@ -100,7 +100,7 @@ def create_venv(self, isolate: bool, venv_location: str) -> str: ) else: install_into_venv( - venv_location, [os.path.join(REPO_ROOT, "eng/tools/azure-sdk-tools[build]")], REPO_ROOT + venv_location, [os.path.join(REPO_ROOT, "eng/tools/azure-sdk-tools")], REPO_ROOT ) venv_python_exe = get_venv_python(venv_location) diff --git a/eng/tools/azure-sdk-tools/pyproject.toml b/eng/tools/azure-sdk-tools/pyproject.toml index c71c3e2de579..0d97d4835e3e 100644 --- a/eng/tools/azure-sdk-tools/pyproject.toml +++ b/eng/tools/azure-sdk-tools/pyproject.toml @@ -13,6 +13,12 @@ authors = [ urls = { "Homepage" = "https://github.com/Azure/azure-sdk-for-python" } dependencies = [ + "setuptools", + "pyparsing", + "certifi", + "cibuildwheel", + "pkginfo", + "build", "packaging", "wheel", "Jinja2", @@ 
-53,7 +59,6 @@ systemperf = "devtools_testutils.perfstress_tests:run_system_perfstress_tests_cm azpysdk = "azpysdk.main:main" [project.optional-dependencies] -build = ["setuptools", "pyparsing", "certifi", "cibuildwheel", "pkginfo", "build"] conda = ["beautifulsoup4"] systemperf = ["aiohttp>=3.0", "requests>=2.0", "tornado==6.0.3", "httpx>=0.21", "azure-core"] ghtools = ["GitPython", "PyGithub>=1.59.0", "requests>=2.0"] diff --git a/eng/tools/azure-sdk-tools/tests/integration/scenarios/dev_requirement_samples/relative_requirements.txt b/eng/tools/azure-sdk-tools/tests/integration/scenarios/dev_requirement_samples/relative_requirements.txt index 1c0445b205fd..1ea22b268105 100644 --- a/eng/tools/azure-sdk-tools/tests/integration/scenarios/dev_requirement_samples/relative_requirements.txt +++ b/eng/tools/azure-sdk-tools/tests/integration/scenarios/dev_requirement_samples/relative_requirements.txt @@ -6,7 +6,7 @@ tests/testserver_tests/coretestserver # random comment that should be ignored ../azure-mgmt-core -e ../azure-mgmt-core -../../../eng/tools/azure-sdk-tools[build] --e ../../../eng/tools/azure-sdk-tools[build] +../../../eng/tools/azure-sdk-tools +-e ../../../eng/tools/azure-sdk-tools -e . . \ No newline at end of file diff --git a/eng/tox/tox.ini b/eng/tox/tox.ini index a5292f184e1d..83572aa08518 100644 --- a/eng/tox/tox.ini +++ b/eng/tox/tox.ini @@ -602,5 +602,5 @@ setenv = {[testenv]setenv} PROXY_URL=http://localhost:5018 commands = - {[tox]pip_command} install {toxinidir}/../../../eng/tools/azure-sdk-tools[build] + {[tox]pip_command} install {toxinidir}/../../../eng/tools/azure-sdk-tools python {repository_root}/eng/tox/run_optional.py -t {toxinidir} --temp={envtmpdir} {posargs} From 59f013b61b64a4da6f47a6de2c7a2a39c5c17c0b Mon Sep 17 00:00:00 2001 From: Scott Beddall Date: Fri, 16 Jan 2026 20:32:46 +0000 Subject: [PATCH 35/76] simplify installation of azure/azure-sdk-tools. build is pretty much the MOST useful version, so we shouldn't make it an optional set of dependencies --- scripts/breaking_changes_checker/README.md | 2 +- sdk/ml/azure-ai-ml/dev_requirements.txt | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/scripts/breaking_changes_checker/README.md b/scripts/breaking_changes_checker/README.md index 15aff938e77d..7c32110c0166 100644 --- a/scripts/breaking_changes_checker/README.md +++ b/scripts/breaking_changes_checker/README.md @@ -11,7 +11,7 @@ Add your package name to the `RUN_BREAKING_CHANGES_PACKAGES` found [here](https: **1) Install azpysdk:** -`pip install -e eng/tools/azure-sdk-tools[build]` +`pip install -e eng/tools/azure-sdk-tools` **2) Run the `breaking` check.** diff --git a/sdk/ml/azure-ai-ml/dev_requirements.txt b/sdk/ml/azure-ai-ml/dev_requirements.txt index eedda6cf8025..e81ceff7abdd 100644 --- a/sdk/ml/azure-ai-ml/dev_requirements.txt +++ b/sdk/ml/azure-ai-ml/dev_requirements.txt @@ -23,3 +23,4 @@ pytest-reportlog python-dotenv azureml-dataprep-rslex>=2.22.0; platform_python_implementation == "CPython" and python_version < "3.13" azureml-dataprep-rslex>=2.22.0; platform_python_implementation == "PyPy" and python_version < "3.10" +pip \ No newline at end of file From e1cb7dd67862f96862d5eb5c525de677a38d03a5 Mon Sep 17 00:00:00 2001 From: Scott Beddall Date: Fri, 16 Jan 2026 21:48:55 +0000 Subject: [PATCH 36/76] undo changes to ml dev_reqs. that's not the issue. 
fix the failing tests --- .../dev_requirement_samples/relative_requirements.txt | 4 ++-- eng/tools/azure-sdk-tools/tests/test_requirements_parse.py | 4 ++-- sdk/ml/azure-ai-ml/dev_requirements.txt | 1 - 3 files changed, 4 insertions(+), 5 deletions(-) diff --git a/eng/tools/azure-sdk-tools/tests/integration/scenarios/dev_requirement_samples/relative_requirements.txt b/eng/tools/azure-sdk-tools/tests/integration/scenarios/dev_requirement_samples/relative_requirements.txt index 1ea22b268105..d448b39d6fda 100644 --- a/eng/tools/azure-sdk-tools/tests/integration/scenarios/dev_requirement_samples/relative_requirements.txt +++ b/eng/tools/azure-sdk-tools/tests/integration/scenarios/dev_requirement_samples/relative_requirements.txt @@ -6,7 +6,7 @@ tests/testserver_tests/coretestserver # random comment that should be ignored ../azure-mgmt-core -e ../azure-mgmt-core -../../../eng/tools/azure-sdk-tools --e ../../../eng/tools/azure-sdk-tools +../../../eng/tools/azure-sdk-tools[conda] +-e ../../../eng/tools/azure-sdk-tools[conda] -e . . \ No newline at end of file diff --git a/eng/tools/azure-sdk-tools/tests/test_requirements_parse.py b/eng/tools/azure-sdk-tools/tests/test_requirements_parse.py index 87dd716f24c6..e73dd0cf27a7 100644 --- a/eng/tools/azure-sdk-tools/tests/test_requirements_parse.py +++ b/eng/tools/azure-sdk-tools/tests/test_requirements_parse.py @@ -79,8 +79,8 @@ def test_replace_dev_reqs_relative(tmp_directory_create): os.path.join(expected_output_folder, f"azure_identity-{identity_version}-py3-none-any.whl"), os.path.join(expected_output_folder, f"azure_mgmt_core-{mgmt_core_version}-py3-none-any.whl"), os.path.join(expected_output_folder, f"azure_mgmt_core-{mgmt_core_version}-py3-none-any.whl"), - os.path.join(expected_output_folder, f"azure_sdk_tools-{sdk_tools_version}-py3-none-any.whl[build]"), - os.path.join(expected_output_folder, f"azure_sdk_tools-{sdk_tools_version}-py3-none-any.whl[build]"), + os.path.join(expected_output_folder, f"azure_sdk_tools-{sdk_tools_version}-py3-none-any.whl[conda]"), + os.path.join(expected_output_folder, f"azure_sdk_tools-{sdk_tools_version}-py3-none-any.whl[conda]"), os.path.join(expected_output_folder, f"azure_core-{core_version}-py3-none-any.whl"), os.path.join(expected_output_folder, f"azure_core-{core_version}-py3-none-any.whl"), ] diff --git a/sdk/ml/azure-ai-ml/dev_requirements.txt b/sdk/ml/azure-ai-ml/dev_requirements.txt index e81ceff7abdd..eedda6cf8025 100644 --- a/sdk/ml/azure-ai-ml/dev_requirements.txt +++ b/sdk/ml/azure-ai-ml/dev_requirements.txt @@ -23,4 +23,3 @@ pytest-reportlog python-dotenv azureml-dataprep-rslex>=2.22.0; platform_python_implementation == "CPython" and python_version < "3.13" azureml-dataprep-rslex>=2.22.0; platform_python_implementation == "PyPy" and python_version < "3.10" -pip \ No newline at end of file From fd43d5ba1bc79680d0b07726e8c51cb44900ac93 Mon Sep 17 00:00:00 2001 From: Scott Beddall Date: Fri, 16 Jan 2026 21:49:48 +0000 Subject: [PATCH 37/76] apply black --- eng/tools/azure-sdk-tools/azpysdk/Check.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/eng/tools/azure-sdk-tools/azpysdk/Check.py b/eng/tools/azure-sdk-tools/azpysdk/Check.py index 37207a324d31..c221dd6a19e0 100644 --- a/eng/tools/azure-sdk-tools/azpysdk/Check.py +++ b/eng/tools/azure-sdk-tools/azpysdk/Check.py @@ -99,9 +99,7 @@ def create_venv(self, isolate: bool, venv_location: str) -> str: f" unable to locate prebuilt azure-sdk-tools within {wheel_dir}" ) else: - install_into_venv( - venv_location, 
[os.path.join(REPO_ROOT, "eng/tools/azure-sdk-tools")], REPO_ROOT - ) + install_into_venv(venv_location, [os.path.join(REPO_ROOT, "eng/tools/azure-sdk-tools")], REPO_ROOT) venv_python_exe = get_venv_python(venv_location) From cf70e04c433446f00c7bf4aef12886eb7cd883e7 Mon Sep 17 00:00:00 2001 From: Scott Beddall Date: Wed, 21 Jan 2026 00:47:10 +0000 Subject: [PATCH 38/76] adjustments to PYTHONPYCACHEPREFIX --- eng/tools/azure-sdk-tools/azpysdk/Check.py | 3 +++ eng/tools/azure-sdk-tools/azpysdk/install_and_test.py | 8 +++++++- 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/eng/tools/azure-sdk-tools/azpysdk/Check.py b/eng/tools/azure-sdk-tools/azpysdk/Check.py index c221dd6a19e0..35649425225a 100644 --- a/eng/tools/azure-sdk-tools/azpysdk/Check.py +++ b/eng/tools/azure-sdk-tools/azpysdk/Check.py @@ -127,6 +127,7 @@ def run_venv_command( check: bool = False, append_executable: bool = True, immediately_dump: bool = False, + additional_environment_settings: Optional[dict] = None, ) -> subprocess.CompletedProcess[str]: """Run a command in the given virtual environment. - Prepends the virtual environment's bin directory to the PATH environment variable (if one exists) @@ -140,6 +141,8 @@ def run_venv_command( ) env = os.environ.copy() + if additional_environment_settings: + env.update(additional_environment_settings) python_exec = pathlib.Path(executable) if python_exec.exists(): diff --git a/eng/tools/azure-sdk-tools/azpysdk/install_and_test.py b/eng/tools/azure-sdk-tools/azpysdk/install_and_test.py index 9f7859e88e14..a1f496755fa8 100644 --- a/eng/tools/azure-sdk-tools/azpysdk/install_and_test.py +++ b/eng/tools/azure-sdk-tools/azpysdk/install_and_test.py @@ -116,8 +116,14 @@ def run_pytest( cwd: Optional[str] = None, ) -> int: pytest_command = ["-m", "pytest", *pytest_args] + environment = {"PYTHONPYCACHEPREFIX": staging_directory} + pytest_result = self.run_venv_command( - executable, pytest_command, cwd=(cwd or staging_directory), immediately_dump=True + executable, + pytest_command, + cwd=(cwd or staging_directory), + immediately_dump=True, + additional_environment_settings=environment, ) if pytest_result.returncode != 0: if pytest_result.returncode == 5 and is_error_code_5_allowed(package_dir, package_name): From 3baf6449108c292f376adee26cb940bf55ed7a28 Mon Sep 17 00:00:00 2001 From: "Scott Beddall (from Dev Box)" Date: Tue, 20 Jan 2026 18:36:35 -0800 Subject: [PATCH 39/76] changes to freeze --- eng/tools/azure-sdk-tools/ci_tools/functions.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/eng/tools/azure-sdk-tools/ci_tools/functions.py b/eng/tools/azure-sdk-tools/ci_tools/functions.py index 5e812576d29f..afddff9c9aa2 100644 --- a/eng/tools/azure-sdk-tools/ci_tools/functions.py +++ b/eng/tools/azure-sdk-tools/ci_tools/functions.py @@ -599,8 +599,9 @@ def run_pip_freeze(python_executable: Optional[str] = None) -> List[str]: pip_cmd = get_pip_command(exe) + # we use `freeze` because it is present on both pip and uv out = subprocess.Popen( - pip_cmd + ["list", "--disable-pip-version-check", "--format", "freeze"], + pip_cmd + ["freeze", "--disable-pip-version-check"], stdout=subprocess.PIPE, stderr=subprocess.STDOUT, ) @@ -610,7 +611,7 @@ def run_pip_freeze(python_executable: Optional[str] = None) -> List[str]: collected_output = [] if stdout and (stderr is None): - for line in stdout.decode("utf-8").split(os.linesep): + for line in stdout.decode("utf-8").splitlines(): if line: collected_output.append(line) else: From a145ef23dbaf45ee520f89957ddf0dbb102e928b 
Mon Sep 17 00:00:00 2001 From: "Scott Beddall (from Dev Box)" Date: Wed, 21 Jan 2026 14:43:36 -0800 Subject: [PATCH 40/76] fix the double newline outputting on windows --- eng/scripts/dispatch_checks.py | 14 ++++++++------ 1 file changed, 8 insertions(+), 6 deletions(-) diff --git a/eng/scripts/dispatch_checks.py b/eng/scripts/dispatch_checks.py index c89086b5d536..a9586ab4d8e2 100644 --- a/eng/scripts/dispatch_checks.py +++ b/eng/scripts/dispatch_checks.py @@ -27,6 +27,10 @@ class CheckResult: stderr: str +def _normalize_newlines(text: str) -> str: + return text.replace("\r\n", "\n").replace("\r", "\n") + + async def run_check( semaphore: asyncio.Semaphore, package: str, @@ -82,11 +86,11 @@ async def run_check( if stdout: print(header) - print(stdout.rstrip()) + print(_normalize_newlines(stdout).rstrip()) print(trailer) if stderr: print(header.replace("OUTPUT", "STDERR")) - print(stderr.rstrip()) + print(_normalize_newlines(stderr).rstrip()) print(trailer) if in_ci(): @@ -236,14 +240,12 @@ def handler(signum, frame): if __name__ == "__main__": - parser = argparse.ArgumentParser( - description=""" + parser = argparse.ArgumentParser(description=""" This script is the single point for all checks invoked by CI within this repo. It works in two phases. 1. Identify which packages in the repo are in scope for this script invocation, based on a glob string and a service directory. 2. Invoke one or multiple `checks` environments for each package identified as in scope. In the case of an environment invoking `pytest`, results can be collected in a junit xml file, and test markers can be selected via --mark_arg. -""" - ) +""") parser.add_argument( "glob_string", From 586e888bee8208a1d52fc7970525fefde4a5be44 Mon Sep 17 00:00:00 2001 From: Scott Beddall Date: Thu, 22 Jan 2026 20:13:50 +0000 Subject: [PATCH 41/76] lets see if we can address these packaging issues --- eng/scripts/dispatch_checks.py | 3 ++- eng/tools/azure-sdk-tools/azpysdk/Check.py | 7 +++++-- 2 files changed, 7 insertions(+), 3 deletions(-) diff --git a/eng/scripts/dispatch_checks.py b/eng/scripts/dispatch_checks.py index a9586ab4d8e2..6129ad940d70 100644 --- a/eng/scripts/dispatch_checks.py +++ b/eng/scripts/dispatch_checks.py @@ -100,7 +100,8 @@ async def run_check( # finally, we need to clean up any temp dirs created by --isolate if in_ci(): - isolate_dir = os.path.join(package, f".venv_{check}") + package_name = os.path.basename(os.path.normpath(package)) + isolate_dir = os.path.join(root_dir, ".venv", package_name, f".venv_{check}") try: shutil.rmtree(isolate_dir) except: diff --git a/eng/tools/azure-sdk-tools/azpysdk/Check.py b/eng/tools/azure-sdk-tools/azpysdk/Check.py index 35649425225a..cbc122dfca8f 100644 --- a/eng/tools/azure-sdk-tools/azpysdk/Check.py +++ b/eng/tools/azure-sdk-tools/azpysdk/Check.py @@ -110,8 +110,11 @@ def create_venv(self, isolate: bool, venv_location: str) -> str: def get_executable(self, isolate: bool, check_name: str, executable: str, package_folder: str) -> Tuple[str, str]: """Get the Python executable that should be used for this check.""" - venv_location = os.path.join(package_folder, f".venv_{check_name}") - + # Keep venvs under a shared repo-level folder to prevent nested import errors during pytest collection + package_name = os.path.basename(os.path.normpath(package_folder)) + shared_venv_root = os.path.join(REPO_ROOT, ".venv", package_name) + os.makedirs(shared_venv_root, exist_ok=True) + venv_location = os.path.join(shared_venv_root, f".venv_{check_name}") # if isolation is required, the 
executable we get back will align with the venv # otherwise we'll just get sys.executable and install in current executable = self.create_venv(isolate, venv_location) From ead9394fa52b9bfb632405da158a57c33c2526b4 Mon Sep 17 00:00:00 2001 From: Scott Beddall Date: Thu, 22 Jan 2026 21:57:27 +0000 Subject: [PATCH 42/76] solve port issues hopefully? --- eng/scripts/dispatch_checks.py | 122 +++++++++++++++++- eng/tools/azure-sdk-tools/azpysdk/Check.py | 3 + .../azure-sdk-tools/azpysdk/proxy_ports.py | 71 ++++++++++ 3 files changed, 195 insertions(+), 1 deletion(-) create mode 100644 eng/tools/azure-sdk-tools/azpysdk/proxy_ports.py diff --git a/eng/scripts/dispatch_checks.py b/eng/scripts/dispatch_checks.py index 6129ad940d70..396252c58bd8 100644 --- a/eng/scripts/dispatch_checks.py +++ b/eng/scripts/dispatch_checks.py @@ -5,14 +5,19 @@ import time import signal import shutil +import shlex +import subprocess +import urllib.request from dataclasses import dataclass -from typing import List +from typing import IO, List, Optional +from azpysdk.proxy_ports import get_proxy_port_for_check, get_proxy_url_for_check from ci_tools.functions import discover_targeted_packages from ci_tools.variables import in_ci from ci_tools.scenario.generation import build_whl_for_req, replace_dev_reqs from ci_tools.logging import configure_logging, logger from ci_tools.environment_exclusions import is_check_enabled, CHECK_DEFAULTS +from devtools_testutils.proxy_startup import prepare_local_tool root_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), "..", "..")) @@ -27,6 +32,110 @@ class CheckResult: stderr: str +@dataclass +class ProxyProcess: + port: int + process: subprocess.Popen + log_handle: Optional[IO[str]] + + +PROXY_STATUS_SUFFIX = "/Info/Available" +PROXY_STARTUP_TIMEOUT = 60 + + +def _proxy_status_url(port: int) -> str: + return f"http://localhost:{port}{PROXY_STATUS_SUFFIX}" + + +def _proxy_is_running(port: int) -> bool: + try: + with urllib.request.urlopen(_proxy_status_url(port), timeout=5) as resp: + return resp.status == 200 + except Exception: + return False + + +def _wait_for_proxy(port: int, timeout: int = PROXY_STARTUP_TIMEOUT) -> bool: + deadline = time.time() + timeout + while time.time() < deadline: + if _proxy_is_running(port): + return True + time.sleep(1) + return _proxy_is_running(port) + + +def _start_proxy(port: int, tool_path: str) -> ProxyProcess: + env = os.environ.copy() + log_handle: Optional[IO[str]] = None + + if in_ci(): + log_path = os.path.join(root_dir, f"_proxy_log_{port}.log") + os.makedirs(os.path.dirname(log_path), exist_ok=True) + log_handle = open(log_path, "a") + assets_folder = os.path.join(root_dir, "l", f"proxy_{port}") + os.makedirs(assets_folder, exist_ok=True) + env["PROXY_ASSETS_FOLDER"] = assets_folder + + command = shlex.split( + f'{tool_path} start --storage-location="{root_dir}" -- --urls "http://localhost:{port}"' + ) + process = subprocess.Popen( + command, + stdout=log_handle or subprocess.DEVNULL, + stderr=log_handle or subprocess.STDOUT, + env=env, + ) + + if not _wait_for_proxy(port): + process.terminate() + if log_handle: + log_handle.close() + raise RuntimeError(f"Failed to start test proxy on port {port}") + + logger.info(f"Started test proxy on port {port}") + return ProxyProcess(port=port, process=process, log_handle=log_handle) + + +def _stop_proxy_instances(instances: List[ProxyProcess]) -> None: + for instance in instances: + proc = instance.process + if proc.poll() is None: + proc.terminate() + try: + proc.wait(timeout=15) + except 
subprocess.TimeoutExpired: + proc.kill() + if instance.log_handle: + instance.log_handle.close() + + +def ensure_proxies_for_checks(checks: List[str]) -> List[ProxyProcess]: + ports = sorted({get_proxy_port_for_check(check) for check in checks if check}) + if not ports: + return [] + + started: List[ProxyProcess] = [] + tool_path: Optional[str] = None + + try: + for port in ports: + if _proxy_is_running(port): + logger.info(f"Test proxy already running on port {port}") + continue + if tool_path is None: + tool_path = prepare_local_tool(root_dir) + + if tool_path is None: + raise RuntimeError("Failed to prepare test proxy tool.") + started.append(_start_proxy(port, tool_path)) + except Exception: + _stop_proxy_instances(started) + raise + + os.environ["PROXY_MANUAL_START"] = "1" + return started + + def _normalize_newlines(text: str) -> str: return text.replace("\r\n", "\n").replace("\r", "\n") @@ -60,12 +169,17 @@ async def run_check( start = time.time() cmd = base_args + [check, "--isolate", package] logger.info(f"[START {idx}/{total}] {check} :: {package}\nCMD: {' '.join(cmd)}") + env = os.environ.copy() + proxy_url = get_proxy_url_for_check(check) + env["PROXY_URL"] = proxy_url + env["PROXY_MANUAL_START"] = "1" try: proc = await asyncio.create_subprocess_exec( *cmd, cwd=package, stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE, + env=env, ) except Exception as ex: # subprocess failed to launch logger.error(f"Failed to start check {check} for {package}: {ex}") @@ -395,9 +509,15 @@ def handler(signum, frame): ) configure_interrupt_handling() + proxy_processes: List[ProxyProcess] = [] try: + if in_ci(): + logger.info(f"Ensuring {len(checks)} test proxies are running for requested checks...") + proxy_processes = ensure_proxies_for_checks(checks) exit_code = asyncio.run(run_all_checks(targeted_packages, checks, args.max_parallel, temp_wheel_dir)) except KeyboardInterrupt: logger.error("Aborted by user.") exit_code = 130 + finally: + _stop_proxy_instances(proxy_processes) sys.exit(exit_code) diff --git a/eng/tools/azure-sdk-tools/azpysdk/Check.py b/eng/tools/azure-sdk-tools/azpysdk/Check.py index cbc122dfca8f..ebf89138e66b 100644 --- a/eng/tools/azure-sdk-tools/azpysdk/Check.py +++ b/eng/tools/azure-sdk-tools/azpysdk/Check.py @@ -21,6 +21,7 @@ ) from ci_tools.variables import discover_repo_root, in_ci from ci_tools.logging import logger +from .proxy_ports import get_proxy_url_for_check # right now, we are assuming you HAVE to be in the azure-sdk-tools repo # we assume this because we don't know how a dev has installed this package, and might be @@ -110,6 +111,8 @@ def create_venv(self, isolate: bool, venv_location: str) -> str: def get_executable(self, isolate: bool, check_name: str, executable: str, package_folder: str) -> Tuple[str, str]: """Get the Python executable that should be used for this check.""" + proxy_url = get_proxy_url_for_check(check_name) + os.environ["PROXY_URL"] = proxy_url # Keep venvs under a shared repo-level folder to prevent nested import errors during pytest collection package_name = os.path.basename(os.path.normpath(package_folder)) shared_venv_root = os.path.join(REPO_ROOT, ".venv", package_name) diff --git a/eng/tools/azure-sdk-tools/azpysdk/proxy_ports.py b/eng/tools/azure-sdk-tools/azpysdk/proxy_ports.py new file mode 100644 index 000000000000..3eaa4dddec7c --- /dev/null +++ b/eng/tools/azure-sdk-tools/azpysdk/proxy_ports.py @@ -0,0 +1,71 @@ +"""Proxy port assignments for azpysdk checks. 
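+
+A minimal sketch of the intended lookup behavior (ports taken from the mapping
+below; any check without an entry falls back to ``DEFAULT_PROXY_PORT``)::
+
+    get_proxy_port_for_check("mypy")      # -> 5003
+    get_proxy_port_for_check("not-real")  # -> 5000 (default)
+    get_proxy_url_for_check("pyright")    # -> "http://localhost:5018"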
+
+This mapping mirrors the explicit `PROXY_URL` configuration found in
+`eng/tox/tox.ini`. Because `dispatch_checks.py` runs multiple checks in
+parallel, each check must bind to its own dedicated test-proxy port to avoid
+races. Keeping this data in a single module allows both the CLI and the CI
+launcher to share the same source of truth without having to parse the tox
+configuration file at runtime.
+"""
+from __future__ import annotations
+
+from typing import Dict, Optional
+
+DEFAULT_PROXY_PORT = 5000
+DEFAULT_PROXY_URL = f"http://localhost:{DEFAULT_PROXY_PORT}"
+
+# NOTE: `import_all` shares the same configuration as the legacy `depends`
+# tox environment. All other entries match the tox environment names 1:1.
+CHECK_PROXY_PORTS: Dict[str, int] = {
+    "import_all": 5008,
+    "mypy": 5003,
+    "next-mypy": 5020,
+    "pylint": 5002,
+    "next-pylint": 5002,
+    "ruff": 5022,
+    "pyright": 5018,
+    "next-pyright": 5021,
+    "verifytypes": 5019,
+    "apistub": 5014,
+    "verify_sdist": 5010,
+    "verify_whl": 5009,
+    "whl": DEFAULT_PROXY_PORT,
+    "whl_no_aio": 5004,
+    "sdist": 5005,
+    "samples": 5016,
+    "devtest": 5011,
+    "latestdependency": 5012,
+    "mindependency": 5013,
+    "bandit": 5015,
+    "verify_keywords": 5005,
+    "generate": DEFAULT_PROXY_PORT,
+    "breaking": 5017,
+    "sphinx": 5007,
+    "next-sphinx": 5023,
+    "optional": 5018,
+    "black": DEFAULT_PROXY_PORT,
+}
+
+
+def get_proxy_port_for_check(check_name: Optional[str]) -> int:
+    """Return the proxy port assigned to the given azpysdk check."""
+
+    if not check_name:
+        return DEFAULT_PROXY_PORT
+    return CHECK_PROXY_PORTS.get(check_name, DEFAULT_PROXY_PORT)
+
+
+def get_proxy_url_for_check(check_name: Optional[str]) -> str:
+    """Return the proxy URL assigned to the given azpysdk check."""
+
+    port = get_proxy_port_for_check(check_name)
+    return f"http://localhost:{port}"
+
+
+__all__ = [
+    "CHECK_PROXY_PORTS",
+    "DEFAULT_PROXY_PORT",
+    "DEFAULT_PROXY_URL",
+    "get_proxy_port_for_check",
+    "get_proxy_url_for_check",
+]

From e9f6f66be7912cad02729d626c5b8ea5e4841b9c Mon Sep 17 00:00:00 2001
From: "Scott Beddall (from Dev Box)"
Date: Thu, 22 Jan 2026 17:04:43 -0800
Subject: [PATCH 43/76] fix the issue with variables not overriding, so
 proxy_port wasn't getting set properly

---
 eng/tools/azure-sdk-tools/ci_tools/variables.py | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/eng/tools/azure-sdk-tools/ci_tools/variables.py b/eng/tools/azure-sdk-tools/ci_tools/variables.py
index 89dce671cd8a..e8f03f216377 100644
--- a/eng/tools/azure-sdk-tools/ci_tools/variables.py
+++ b/eng/tools/azure-sdk-tools/ci_tools/variables.py
@@ -117,8 +117,7 @@ def set_environment_from_dictionary(settings: Dict[str, str]) -> None:
         settings (Dict[str, str]): A dictionary of environment variable names and their default values.
     """
     for key, value in settings.items():
-        if key not in os.environ:
-            os.environ.setdefault(key, value)
+        os.environ.setdefault(key, value)
 
 
 def set_envvar_defaults(settings: Optional[Dict[str, str]] = None) -> None:

From f392ee094aa75bf8488472cc4e20fd47601cf324 Mon Sep 17 00:00:00 2001
From: "Scott Beddall (from Dev Box)"
Date: Thu, 22 Jan 2026 17:22:11 -0800
Subject: [PATCH 44/76] ensure that proxies _should_ be properly targeted.
on windows we're unable to startup for some WONKY reason --- eng/scripts/dispatch_checks.py | 4 +--- eng/tools/azure-sdk-tools/azpysdk/dependency_check.py | 11 ++++++++++- eng/tools/azure-sdk-tools/azpysdk/devtest.py | 7 ++++++- eng/tools/azure-sdk-tools/azpysdk/latestdependency.py | 3 ++- eng/tools/azure-sdk-tools/azpysdk/mindependency.py | 3 ++- eng/tools/azure-sdk-tools/azpysdk/optional.py | 7 ++++++- eng/tools/azure-sdk-tools/azpysdk/samples.py | 3 ++- eng/tools/azure-sdk-tools/azpysdk/sdist.py | 3 ++- eng/tools/azure-sdk-tools/azpysdk/whl.py | 3 ++- eng/tools/azure-sdk-tools/azpysdk/whl_no_aio.py | 3 ++- 10 files changed, 35 insertions(+), 12 deletions(-) diff --git a/eng/scripts/dispatch_checks.py b/eng/scripts/dispatch_checks.py index 396252c58bd8..f9c8b6ed25ad 100644 --- a/eng/scripts/dispatch_checks.py +++ b/eng/scripts/dispatch_checks.py @@ -11,7 +11,7 @@ from dataclasses import dataclass from typing import IO, List, Optional -from azpysdk.proxy_ports import get_proxy_port_for_check, get_proxy_url_for_check +from azpysdk.proxy_ports import get_proxy_port_for_check from ci_tools.functions import discover_targeted_packages from ci_tools.variables import in_ci from ci_tools.scenario.generation import build_whl_for_req, replace_dev_reqs @@ -170,8 +170,6 @@ async def run_check( cmd = base_args + [check, "--isolate", package] logger.info(f"[START {idx}/{total}] {check} :: {package}\nCMD: {' '.join(cmd)}") env = os.environ.copy() - proxy_url = get_proxy_url_for_check(check) - env["PROXY_URL"] = proxy_url env["PROXY_MANUAL_START"] = "1" try: proc = await asyncio.create_subprocess_exec( diff --git a/eng/tools/azure-sdk-tools/azpysdk/dependency_check.py b/eng/tools/azure-sdk-tools/azpysdk/dependency_check.py index c2757295e501..bc070e264435 100644 --- a/eng/tools/azure-sdk-tools/azpysdk/dependency_check.py +++ b/eng/tools/azure-sdk-tools/azpysdk/dependency_check.py @@ -5,6 +5,7 @@ from typing import Dict, List, Optional from .Check import Check, DEPENDENCY_TOOLS_REQUIREMENTS, PACKAGING_REQUIREMENTS, TEST_TOOLS_REQUIREMENTS +from .proxy_ports import get_proxy_url_for_check from ci_tools.functions import install_into_venv, is_error_code_5_allowed from ci_tools.scenario.generation import create_package_and_install @@ -28,8 +29,16 @@ def __init__( ) -> None: super().__init__() self.dependency_type = dependency_type - self.proxy_url = proxy_url self.display_name = display_name + resolved_proxy = get_proxy_url_for_check(display_name) + if proxy_url and proxy_url != resolved_proxy: + logger.debug( + "Overriding provided proxy_url %s with mapping value %s for check %s", + proxy_url, + resolved_proxy, + display_name, + ) + self.proxy_url = resolved_proxy self.additional_packages = list(additional_packages or []) def register( diff --git a/eng/tools/azure-sdk-tools/azpysdk/devtest.py b/eng/tools/azure-sdk-tools/azpysdk/devtest.py index 662e0ee5e5f2..109e88118e15 100644 --- a/eng/tools/azure-sdk-tools/azpysdk/devtest.py +++ b/eng/tools/azure-sdk-tools/azpysdk/devtest.py @@ -14,6 +14,7 @@ from ci_tools.logging import logger from .install_and_test import InstallAndTest +from .proxy_ports import get_proxy_url_for_check REPO_ROOT = discover_repo_root() common_task_path = os.path.abspath(os.path.join(REPO_ROOT, "scripts", "devops_tasks")) @@ -122,7 +123,11 @@ def install_dev_build_packages(executable: str, pkg_name_to_exclude: str, workin class devtest(InstallAndTest): def __init__(self) -> None: - super().__init__(package_type="sdist", proxy_url="http://localhost:5002", display_name="devtest") + 
super().__init__( + package_type="sdist", + proxy_url=get_proxy_url_for_check("devtest"), + display_name="devtest", + ) def register( self, subparsers: "argparse._SubParsersAction", parent_parsers: Optional[List[argparse.ArgumentParser]] = None diff --git a/eng/tools/azure-sdk-tools/azpysdk/latestdependency.py b/eng/tools/azure-sdk-tools/azpysdk/latestdependency.py index 65d22c2efa0a..15153ba163e1 100644 --- a/eng/tools/azure-sdk-tools/azpysdk/latestdependency.py +++ b/eng/tools/azure-sdk-tools/azpysdk/latestdependency.py @@ -2,13 +2,14 @@ from typing import List, Optional from .dependency_check import DependencyCheck +from .proxy_ports import get_proxy_url_for_check class latestdependency(DependencyCheck): def __init__(self) -> None: super().__init__( dependency_type="Latest", - proxy_url="http://localhost:5012", + proxy_url=get_proxy_url_for_check("latestdependency"), display_name="latestdependency", ) diff --git a/eng/tools/azure-sdk-tools/azpysdk/mindependency.py b/eng/tools/azure-sdk-tools/azpysdk/mindependency.py index 994ac200334e..53f587105d44 100644 --- a/eng/tools/azure-sdk-tools/azpysdk/mindependency.py +++ b/eng/tools/azure-sdk-tools/azpysdk/mindependency.py @@ -2,13 +2,14 @@ from typing import List, Optional from .dependency_check import DependencyCheck +from .proxy_ports import get_proxy_url_for_check class mindependency(DependencyCheck): def __init__(self) -> None: super().__init__( dependency_type="Minimum", - proxy_url="http://localhost:5013", + proxy_url=get_proxy_url_for_check("mindependency"), display_name="mindependency", additional_packages=[ "azure-mgmt-keyvault<7.0.0", diff --git a/eng/tools/azure-sdk-tools/azpysdk/optional.py b/eng/tools/azure-sdk-tools/azpysdk/optional.py index df04faa518a4..df951b6a7542 100644 --- a/eng/tools/azure-sdk-tools/azpysdk/optional.py +++ b/eng/tools/azure-sdk-tools/azpysdk/optional.py @@ -6,6 +6,7 @@ from typing import Optional, List from .install_and_test import InstallAndTest +from .proxy_ports import get_proxy_url_for_check from ci_tools.functions import ( install_into_venv, uninstall_from_venv, @@ -20,7 +21,11 @@ class optional(InstallAndTest): def __init__(self) -> None: - super().__init__(package_type="sdist", proxy_url="http://localhost:5004", display_name="optional") + super().__init__( + package_type="sdist", + proxy_url=get_proxy_url_for_check("optional"), + display_name="optional", + ) def register( self, subparsers: "argparse._SubParsersAction", parent_parsers: Optional[List[argparse.ArgumentParser]] = None diff --git a/eng/tools/azure-sdk-tools/azpysdk/samples.py b/eng/tools/azure-sdk-tools/azpysdk/samples.py index 24a476161c52..f80cece0bf5f 100644 --- a/eng/tools/azure-sdk-tools/azpysdk/samples.py +++ b/eng/tools/azure-sdk-tools/azpysdk/samples.py @@ -6,6 +6,7 @@ from typing import Optional, List from .Check import Check +from .proxy_ports import get_proxy_url_for_check from ci_tools.functions import install_into_venv from ci_tools.scenario.generation import create_package_and_install from ci_tools.variables import discover_repo_root, set_envvar_defaults @@ -302,7 +303,7 @@ def run(self, args: argparse.Namespace) -> int: """Run the samples check command.""" logger.info("Running samples check...") - set_envvar_defaults({"PROXY_URL": "http://localhost:5003"}) + set_envvar_defaults({"PROXY_URL": get_proxy_url_for_check(args.command)}) targeted = self.get_targeted_directories(args) results: List[int] = [] diff --git a/eng/tools/azure-sdk-tools/azpysdk/sdist.py b/eng/tools/azure-sdk-tools/azpysdk/sdist.py index 
761473ba1f80..8f846db66311 100644 --- a/eng/tools/azure-sdk-tools/azpysdk/sdist.py +++ b/eng/tools/azure-sdk-tools/azpysdk/sdist.py @@ -2,13 +2,14 @@ from typing import List, Optional from .install_and_test import InstallAndTest +from .proxy_ports import get_proxy_url_for_check class sdist(InstallAndTest): def __init__(self) -> None: super().__init__( package_type="sdist", - proxy_url="http://localhost:5005", + proxy_url=get_proxy_url_for_check("sdist"), display_name="sdist", ) diff --git a/eng/tools/azure-sdk-tools/azpysdk/whl.py b/eng/tools/azure-sdk-tools/azpysdk/whl.py index 2eed20071f4a..d710067a0657 100644 --- a/eng/tools/azure-sdk-tools/azpysdk/whl.py +++ b/eng/tools/azure-sdk-tools/azpysdk/whl.py @@ -2,13 +2,14 @@ from typing import List, Optional from .install_and_test import InstallAndTest +from .proxy_ports import get_proxy_url_for_check class whl(InstallAndTest): def __init__(self) -> None: super().__init__( package_type="wheel", - proxy_url="http://localhost:5001", + proxy_url=get_proxy_url_for_check("whl"), display_name="whl", ) diff --git a/eng/tools/azure-sdk-tools/azpysdk/whl_no_aio.py b/eng/tools/azure-sdk-tools/azpysdk/whl_no_aio.py index d8531b2f96ee..5ba732fd11e0 100644 --- a/eng/tools/azure-sdk-tools/azpysdk/whl_no_aio.py +++ b/eng/tools/azure-sdk-tools/azpysdk/whl_no_aio.py @@ -2,6 +2,7 @@ from typing import List, Optional from .install_and_test import InstallAndTest +from .proxy_ports import get_proxy_url_for_check from ci_tools.logging import logger @@ -9,7 +10,7 @@ class whl_no_aio(InstallAndTest): def __init__(self) -> None: super().__init__( package_type="wheel", - proxy_url="http://localhost:5004", + proxy_url=get_proxy_url_for_check("whl_no_aio"), display_name="whl_no_aio", ) From 55863a5505d4a6a2c102a1cf4031025caaf21273 Mon Sep 17 00:00:00 2001 From: "Scott Beddall (from Dev Box)" Date: Thu, 22 Jan 2026 17:43:50 -0800 Subject: [PATCH 45/76] simpler class errors --- eng/scripts/dispatch_checks.py | 34 +++++++++++++++++++++++++++++++--- 1 file changed, 31 insertions(+), 3 deletions(-) diff --git a/eng/scripts/dispatch_checks.py b/eng/scripts/dispatch_checks.py index f9c8b6ed25ad..7a7e9c2aaec1 100644 --- a/eng/scripts/dispatch_checks.py +++ b/eng/scripts/dispatch_checks.py @@ -64,6 +64,15 @@ def _wait_for_proxy(port: int, timeout: int = PROXY_STARTUP_TIMEOUT) -> bool: return _proxy_is_running(port) +def _wait_for_proxy_shutdown(port: int, timeout: int = 15) -> bool: + deadline = time.time() + timeout + while time.time() < deadline: + if not _proxy_is_running(port): + return True + time.sleep(0.5) + return not _proxy_is_running(port) + + def _start_proxy(port: int, tool_path: str) -> ProxyProcess: env = os.environ.copy() log_handle: Optional[IO[str]] = None @@ -79,11 +88,14 @@ def _start_proxy(port: int, tool_path: str) -> ProxyProcess: command = shlex.split( f'{tool_path} start --storage-location="{root_dir}" -- --urls "http://localhost:{port}"' ) + creationflags = subprocess.CREATE_NEW_PROCESS_GROUP if os.name == "nt" else 0 + process = subprocess.Popen( command, stdout=log_handle or subprocess.DEVNULL, stderr=log_handle or subprocess.STDOUT, env=env, + creationflags=creationflags, ) if not _wait_for_proxy(port): @@ -100,13 +112,29 @@ def _stop_proxy_instances(instances: List[ProxyProcess]) -> None: for instance in instances: proc = instance.process if proc.poll() is None: - proc.terminate() try: - proc.wait(timeout=15) + if os.name == "nt" and hasattr(signal, "CTRL_BREAK_EVENT"): + proc.send_signal(signal.CTRL_BREAK_EVENT) + else: + 
proc.send_signal(signal.SIGINT) + proc.wait(timeout=20) + except (ProcessLookupError, PermissionError): + pass + except (ValueError, OSError): + pass except subprocess.TimeoutExpired: - proc.kill() + proc.terminate() + try: + proc.wait(timeout=10) + except subprocess.TimeoutExpired: + proc.kill() + except Exception: + proc.terminate() if instance.log_handle: instance.log_handle.close() + if _proxy_is_running(instance.port): + if not _wait_for_proxy_shutdown(instance.port, timeout=10): + logger.warning(f"Test proxy on port {instance.port} did not stop cleanly.") def ensure_proxies_for_checks(checks: List[str]) -> List[ProxyProcess]: From 80e32fc830de155ec5bf75f91d891ce339dcd7e3 Mon Sep 17 00:00:00 2001 From: mccoyp Date: Mon, 26 Jan 2026 20:02:26 +0000 Subject: [PATCH 46/76] Use Python's flask --- sdk/core/azure-core/tests/async_tests/conftest.py | 2 +- sdk/core/azure-core/tests/conftest.py | 3 ++- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/sdk/core/azure-core/tests/async_tests/conftest.py b/sdk/core/azure-core/tests/async_tests/conftest.py index 6db03141b52c..59cdfe4a3674 100644 --- a/sdk/core/azure-core/tests/async_tests/conftest.py +++ b/sdk/core/azure-core/tests/async_tests/conftest.py @@ -60,7 +60,7 @@ def start_testserver(): port = get_port() os.environ["FLASK_APP"] = "coretestserver" os.environ["FLASK_PORT"] = str(port) - cmd = "flask run -p {}".format(port) + cmd = f"{sys.executable} -m flask run -p {port}" if os.name == "nt": # On windows, subprocess creation works without being in the shell child_process = subprocess.Popen(cmd, env=dict(os.environ)) else: diff --git a/sdk/core/azure-core/tests/conftest.py b/sdk/core/azure-core/tests/conftest.py index e9a947c796ca..0d9ebb6c8c24 100644 --- a/sdk/core/azure-core/tests/conftest.py +++ b/sdk/core/azure-core/tests/conftest.py @@ -30,6 +30,7 @@ import subprocess import random import platform +import sys import urllib from typing import Generator @@ -74,7 +75,7 @@ def start_testserver(): # to set these additional env vars for pypy os.environ["LC_ALL"] = "C.UTF-8" os.environ["LANG"] = "C.UTF-8" - cmd = "flask run -p {}".format(port) + cmd = f"{sys.executable} -m flask run -p {port}" if os.name == "nt": # On windows, subprocess creation works without being in the shell child_process = subprocess.Popen(cmd, env=dict(os.environ)) else: From c35c7ceeb6140aad649d76fb6278ae9ff01f7118 Mon Sep 17 00:00:00 2001 From: Scott Beddall Date: Wed, 28 Jan 2026 01:45:42 +0000 Subject: [PATCH 47/76] fix the filewatch issue plaguing windows --- eng/scripts/dispatch_checks.py | 22 ++++++++++++++++++---- 1 file changed, 18 insertions(+), 4 deletions(-) diff --git a/eng/scripts/dispatch_checks.py b/eng/scripts/dispatch_checks.py index 7a7e9c2aaec1..7905d74c3b21 100644 --- a/eng/scripts/dispatch_checks.py +++ b/eng/scripts/dispatch_checks.py @@ -20,6 +20,7 @@ from devtools_testutils.proxy_startup import prepare_local_tool root_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), "..", "..")) +ISOLATE_DIRS_TO_CLEAN: List[str] = [] @dataclass @@ -137,6 +138,21 @@ def _stop_proxy_instances(instances: List[ProxyProcess]) -> None: logger.warning(f"Test proxy on port {instance.port} did not stop cleanly.") +def _cleanup_isolate_dirs() -> None: + if not ISOLATE_DIRS_TO_CLEAN: + return + + for path in ISOLATE_DIRS_TO_CLEAN: + if not path: + continue + if os.path.exists(path): + try: + shutil.rmtree(path) + except Exception: + logger.warning(f"Failed to remove isolate dir {path}") + ISOLATE_DIRS_TO_CLEAN.clear() + + def 
ensure_proxies_for_checks(checks: List[str]) -> List[ProxyProcess]:
     ports = sorted({get_proxy_port_for_check(check) for check in checks if check})
     if not ports:
@@ -242,10 +258,7 @@ async def run_check(
         if in_ci():
             package_name = os.path.basename(os.path.normpath(package))
             isolate_dir = os.path.join(root_dir, ".venv", package_name, f".venv_{check}")
-            try:
-                shutil.rmtree(isolate_dir)
-            except:
-                logger.warning(f"Failed to remove isolate dir {isolate_dir} for {package} / {check}")
+            ISOLATE_DIRS_TO_CLEAN.append(isolate_dir)
 
     return CheckResult(package, check, exit_code, duration, stdout, stderr)
 

From dfdc6b2a5db3e9cb3566ea7951179c01ff370bba Mon Sep 17 00:00:00 2001
From: Scott Beddall
Date: Wed, 28 Jan 2026 02:45:29 +0000
Subject: [PATCH 48/76] when files change under the default root of the proxy,
 we don't want it to shut down

---
 eng/scripts/dispatch_checks.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/eng/scripts/dispatch_checks.py b/eng/scripts/dispatch_checks.py
index 7905d74c3b21..02033798bb92 100644
--- a/eng/scripts/dispatch_checks.py
+++ b/eng/scripts/dispatch_checks.py
@@ -85,6 +85,7 @@ def _start_proxy(port: int, tool_path: str) -> ProxyProcess:
         assets_folder = os.path.join(root_dir, "l", f"proxy_{port}")
         os.makedirs(assets_folder, exist_ok=True)
         env["PROXY_ASSETS_FOLDER"] = assets_folder
+        env["DOTNET_HOSTBUILDER__RELOADCONFIGONCHANGE"] = "false"
 
     command = shlex.split(
         f'{tool_path} start --storage-location="{root_dir}" -- --urls "http://localhost:{port}"'

From a7f4a6a8d988d2bbf8bf2186faa21460d59b391d Mon Sep 17 00:00:00 2001
From: Scott Beddall
Date: Wed, 28 Jan 2026 05:14:52 +0000
Subject: [PATCH 49/76] we are now sensitive to which proxy is which. this
 _should_ repair azure-core

---
 eng/scripts/dispatch_checks.py | 30 +++++++++++++++++++++++-------
 1 file changed, 23 insertions(+), 7 deletions(-)

diff --git a/eng/scripts/dispatch_checks.py b/eng/scripts/dispatch_checks.py
index 02033798bb92..1f8724ade3d6 100644
--- a/eng/scripts/dispatch_checks.py
+++ b/eng/scripts/dispatch_checks.py
@@ -42,6 +42,7 @@ class ProxyProcess:
 
 PROXY_STATUS_SUFFIX = "/Info/Available"
 PROXY_STARTUP_TIMEOUT = 60
+BASE_PROXY_PORT = 5000
 
 
 def _proxy_status_url(port: int) -> str:
@@ -192,6 +193,7 @@ async def run_check(
     base_args: List[str],
     idx: int,
     total: int,
+    proxy_port: int,
 ) -> CheckResult:
     """Run a single check (subprocess) within a concurrency semaphore, capturing output and timing.
 
@@ -207,6 +209,8 @@ async def run_check(
     :type idx: int
     :param total: Total number of tasks (used for logging progress).
     :type total: int
+    :param proxy_port: Dedicated proxy port assigned to this check instance.
+    :type proxy_port: int
     :returns: A :class:`CheckResult` describing exit code, duration and captured output.
:rtype: CheckResult """ @@ -216,6 +220,7 @@ async def run_check( logger.info(f"[START {idx}/{total}] {check} :: {package}\nCMD: {' '.join(cmd)}") env = os.environ.copy() env["PROXY_MANUAL_START"] = "1" + env["PROXY_URL"] = f"http://localhost:{proxy_port}" try: proc = await asyncio.create_subprocess_exec( *cmd, @@ -309,7 +314,7 @@ async def run_all_checks(packages, checks, max_parallel, wheel_dir): tasks = [] semaphore = asyncio.Semaphore(max_parallel) combos = [(p, c) for p in packages for c in checks] - total = len(combos) + scheduled: List[tuple] = [] test_tools_path = os.path.join(root_dir, "eng", "test_tools.txt") dependency_tools_path = os.path.join(root_dir, "eng", "dependency_tools.txt") @@ -332,11 +337,22 @@ async def run_all_checks(packages, checks, max_parallel, wheel_dir): replace_dev_reqs(destination_dev_req, pkg, wheel_dir) - for idx, (package, check) in enumerate(combos, start=1): + next_proxy_port = BASE_PROXY_PORT + for package, check in combos: if not is_check_enabled(package, check, CHECK_DEFAULTS.get(check, True)): - logger.warning(f"Skipping disabled check {check} ({idx}/{total}) for package {package}") + logger.warning(f"Skipping disabled check {check} for package {package}") continue - tasks.append(asyncio.create_task(run_check(semaphore, package, check, base_args, idx, total))) + scheduled.append((package, check, next_proxy_port)) + next_proxy_port += 1 + + total = len(scheduled) + + for idx, (package, check, proxy_port) in enumerate(scheduled, start=1): + tasks.append( + asyncio.create_task( + run_check(semaphore, package, check, base_args, idx, total or 1, proxy_port) + ) + ) # Handle Ctrl+C gracefully pending = set(tasks) @@ -349,7 +365,7 @@ async def run_all_checks(packages, checks, max_parallel, wheel_dir): raise # Normalize exceptions norm_results: List[CheckResult] = [] - for res, (package, check) in zip(results, combos): + for res, (package, check, _) in zip(results, scheduled): if isinstance(res, CheckResult): norm_results.append(res) elif isinstance(res, Exception): @@ -553,12 +569,12 @@ def handler(signum, frame): try: if in_ci(): logger.info(f"Ensuring {len(checks)} test proxies are running for requested checks...") - proxy_processes = ensure_proxies_for_checks(checks) + # proxy_processes = ensure_proxies_for_checks(checks) exit_code = asyncio.run(run_all_checks(targeted_packages, checks, args.max_parallel, temp_wheel_dir)) except KeyboardInterrupt: logger.error("Aborted by user.") exit_code = 130 finally: - _stop_proxy_instances(proxy_processes) + # _stop_proxy_instances(proxy_processes) _cleanup_isolate_dirs() sys.exit(exit_code) From ba87dac49870b80257e56f64b47787827fa55c35 Mon Sep 17 00:00:00 2001 From: Scott Beddall Date: Wed, 28 Jan 2026 05:45:01 +0000 Subject: [PATCH 50/76] let them start --- eng/scripts/dispatch_checks.py | 1 - 1 file changed, 1 deletion(-) diff --git a/eng/scripts/dispatch_checks.py b/eng/scripts/dispatch_checks.py index 1f8724ade3d6..77cc9e4ea5e0 100644 --- a/eng/scripts/dispatch_checks.py +++ b/eng/scripts/dispatch_checks.py @@ -219,7 +219,6 @@ async def run_check( cmd = base_args + [check, "--isolate", package] logger.info(f"[START {idx}/{total}] {check} :: {package}\nCMD: {' '.join(cmd)}") env = os.environ.copy() - env["PROXY_MANUAL_START"] = "1" env["PROXY_URL"] = f"http://localhost:{proxy_port}" try: proc = await asyncio.create_subprocess_exec( From 9687468107474514e31eab49111bd02b5ee82c4c Mon Sep 17 00:00:00 2001 From: Scott Beddall Date: Wed, 28 Jan 2026 18:25:55 +0000 Subject: [PATCH 51/76] further simplification of the 
proxy startup code

---
 eng/scripts/dispatch_checks.py                     | 164 +++-------------
 .../devtools_testutils/proxy_startup.py            |  46 +++--
 2 files changed, 76 insertions(+), 134 deletions(-)

diff --git a/eng/scripts/dispatch_checks.py b/eng/scripts/dispatch_checks.py
index 77cc9e4ea5e0..6b9458b2fc0e 100644
--- a/eng/scripts/dispatch_checks.py
+++ b/eng/scripts/dispatch_checks.py
@@ -43,102 +43,8 @@ class ProxyProcess:
 PROXY_STATUS_SUFFIX = "/Info/Available"
 PROXY_STARTUP_TIMEOUT = 60
 BASE_PROXY_PORT = 5000
-
-
-def _proxy_status_url(port: int) -> str:
-    return f"http://localhost:{port}{PROXY_STATUS_SUFFIX}"
-
-
-def _proxy_is_running(port: int) -> bool:
-    try:
-        with urllib.request.urlopen(_proxy_status_url(port), timeout=5) as resp:
-            return resp.status == 200
-    except Exception:
-        return False
-
-
-def _wait_for_proxy(port: int, timeout: int = PROXY_STARTUP_TIMEOUT) -> bool:
-    deadline = time.time() + timeout
-    while time.time() < deadline:
-        if _proxy_is_running(port):
-            return True
-        time.sleep(1)
-    return _proxy_is_running(port)
-
-
-def _wait_for_proxy_shutdown(port: int, timeout: int = 15) -> bool:
-    deadline = time.time() + timeout
-    while time.time() < deadline:
-        if not _proxy_is_running(port):
-            return True
-        time.sleep(0.5)
-    return not _proxy_is_running(port)
-
-
-def _start_proxy(port: int, tool_path: str) -> ProxyProcess:
-    env = os.environ.copy()
-    log_handle: Optional[IO[str]] = None
-
-    if in_ci():
-        log_path = os.path.join(root_dir, f"_proxy_log_{port}.log")
-        os.makedirs(os.path.dirname(log_path), exist_ok=True)
-        log_handle = open(log_path, "a")
-        assets_folder = os.path.join(root_dir, "l", f"proxy_{port}")
-        os.makedirs(assets_folder, exist_ok=True)
-        env["PROXY_ASSETS_FOLDER"] = assets_folder
-        env["DOTNET_HOSTBUILDER__RELOADCONFIGONCHANGE"] = "false"
-
-    command = shlex.split(
-        f'{tool_path} start --storage-location="{root_dir}" -- --urls "http://localhost:{port}"'
-    )
-    creationflags = subprocess.CREATE_NEW_PROCESS_GROUP if os.name == "nt" else 0
-
-    process = subprocess.Popen(
-        command,
-        stdout=log_handle or subprocess.DEVNULL,
-        stderr=log_handle or subprocess.STDOUT,
-        env=env,
-        creationflags=creationflags,
-    )
-
-    if not _wait_for_proxy(port):
-        process.terminate()
-        if log_handle:
-            log_handle.close()
-        raise RuntimeError(f"Failed to start test proxy on port {port}")
-
-    logger.info(f"Started test proxy on port {port}")
-    return ProxyProcess(port=port, process=process, log_handle=log_handle)
-
-
-def _stop_proxy_instances(instances: List[ProxyProcess]) -> None:
-    for instance in instances:
-        proc = instance.process
-        if proc.poll() is None:
-            try:
-                if os.name == "nt" and hasattr(signal, "CTRL_BREAK_EVENT"):
-                    proc.send_signal(signal.CTRL_BREAK_EVENT)
-                else:
-                    proc.send_signal(signal.SIGINT)
-                proc.wait(timeout=20)
-            except (ProcessLookupError, PermissionError):
-                pass
-            except (ValueError, OSError):
-                pass
-            except subprocess.TimeoutExpired:
-                proc.terminate()
-                try:
-                    proc.wait(timeout=10)
-                except subprocess.TimeoutExpired:
-                    proc.kill()
-            except Exception:
-                proc.terminate()
-        if instance.log_handle:
-            instance.log_handle.close()
-        if _proxy_is_running(instance.port):
-            if not _wait_for_proxy_shutdown(instance.port, timeout=10):
-                logger.warning(f"Test proxy on port {instance.port} did not stop cleanly.")
-
+# Checks implemented via InstallAndTest all require shared recording restore behavior.
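+# A sketch of the flow this patch sets up (an assumption drawn from the code below,
+# not a spec): if any requested check is in this set, the launcher runs
+# `test-proxy restore -a <package>/assets.json` once per targeted package before the
+# checks start, so parallel pytest runs read recordings instead of racing to fetch them.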
+INSTALL_AND_TEST_CHECKS = {"whl", "whl_no_aio", "sdist", "devtest", "optional", "latestdependency", "mindependency"} def _cleanup_isolate_dirs() -> None: if not ISOLATE_DIRS_TO_CLEAN: @@ -155,35 +61,36 @@ def _cleanup_isolate_dirs() -> None: ISOLATE_DIRS_TO_CLEAN.clear() -def ensure_proxies_for_checks(checks: List[str]) -> List[ProxyProcess]: - ports = sorted({get_proxy_port_for_check(check) for check in checks if check}) - if not ports: - return [] - - started: List[ProxyProcess] = [] - tool_path: Optional[str] = None +def _normalize_newlines(text: str) -> str: + return text.replace("\r\n", "\n").replace("\r", "\n") - try: - for port in ports: - if _proxy_is_running(port): - logger.info(f"Test proxy already running on port {port}") - continue - if tool_path is None: - tool_path = prepare_local_tool(root_dir) - - if tool_path is None: - raise RuntimeError("Failed to prepare test proxy tool.") - started.append(_start_proxy(port, tool_path)) - except Exception: - _stop_proxy_instances(started) - raise - os.environ["PROXY_MANUAL_START"] = "1" - return started +def _checks_require_recording_restore(checks: List[str]) -> bool: + return any(check in INSTALL_AND_TEST_CHECKS for check in checks) -def _normalize_newlines(text: str) -> str: - return text.replace("\r\n", "\n").replace("\r", "\n") +def _restore_package_recordings(packages: List[str], proxy_executable: str) -> None: + unique_packages = list(dict.fromkeys(packages)) + for package in unique_packages: + assets_path = os.path.join(package, "assets.json") + if not os.path.exists(assets_path): + logger.debug(f"No assets.json found under {package}; skipping restore.") + continue + cmd = [proxy_executable, "restore", "-a", assets_path] + logger.info(f"Restoring recordings for {package} using assets file {assets_path}.") + try: + completed = subprocess.run(cmd, capture_output=True, text=True, check=True) + if completed.stdout: + logger.debug(completed.stdout.strip()) + if completed.stderr: + logger.debug(completed.stderr.strip()) + except subprocess.CalledProcessError as exc: + logger.error(f"Failed to restore recordings for {package}: {exc}") + if exc.stdout: + logger.error(exc.stdout.strip()) + if exc.stderr: + logger.error(exc.stderr.strip()) + raise async def run_check( @@ -559,6 +466,19 @@ def handler(signum, frame): logger.error("No valid checks provided via -c/--checks.") sys.exit(2) + if in_ci() and _checks_require_recording_restore(checks): + try: + proxy_executable = prepare_local_tool(root_dir) + except Exception as exc: + logger.error(f"Unable to prepare test proxy executable for recording restore: {exc}") + sys.exit(1) + + try: + _restore_package_recordings(targeted_packages, proxy_executable) + except subprocess.CalledProcessError: + logger.error("Recording restore pre-pass failed; aborting check execution.") + sys.exit(1) + logger.info( f"Running {len(checks)} check(s) across {len(targeted_packages)} packages (max_parallel={args.max_parallel})." 
) @@ -568,12 +488,10 @@ def handler(signum, frame): try: if in_ci(): logger.info(f"Ensuring {len(checks)} test proxies are running for requested checks...") - # proxy_processes = ensure_proxies_for_checks(checks) exit_code = asyncio.run(run_all_checks(targeted_packages, checks, args.max_parallel, temp_wheel_dir)) except KeyboardInterrupt: logger.error("Aborted by user.") exit_code = 130 finally: - # _stop_proxy_instances(proxy_processes) _cleanup_isolate_dirs() sys.exit(exit_code) diff --git a/eng/tools/azure-sdk-tools/devtools_testutils/proxy_startup.py b/eng/tools/azure-sdk-tools/devtools_testutils/proxy_startup.py index ac5f5bdc3429..72a308601f81 100644 --- a/eng/tools/azure-sdk-tools/devtools_testutils/proxy_startup.py +++ b/eng/tools/azure-sdk-tools/devtools_testutils/proxy_startup.py @@ -19,6 +19,7 @@ from dotenv import load_dotenv, find_dotenv import pytest import subprocess +from urllib.parse import urlparse from urllib3.exceptions import SSLError from ci_tools.variables import in_ci @@ -97,6 +98,28 @@ discovered_roots = [] +def _get_proxy_log_suffix() -> str: + """Derive a log suffix based on the configured proxy port.""" + + proxy_url = os.getenv("PROXY_URL", PROXY_URL) + normalized = proxy_url if "://" in proxy_url else f"http://{proxy_url}" + try: + parsed = urlparse(normalized) + except Exception as exc: # pragma: no cover - defensive parsing guard + _LOGGER.debug("Unable to parse PROXY_URL '%s': %s", proxy_url, exc) + return "default" + + if parsed.port: + return str(parsed.port) + + if parsed.netloc and ":" in parsed.netloc: + candidate = parsed.netloc.rsplit(":", 1)[-1] + if candidate.isdigit(): + return candidate + + return "default" + + def get_target_version(repo_root: str) -> str: """Gets the target test-proxy version from the target_version.txt file in /eng/common/testproxy""" version_file_location = os.path.relpath("eng/common/testproxy/target_version.txt") @@ -362,19 +385,14 @@ def start_test_proxy(request) -> None: # If we're in CI, allow for tox environment parallelization and write proxy output to a log file log = None if in_ci(): - envname = os.getenv("TOX_ENV_NAME", "default") - log = open(os.path.join(root, "_proxy_log_{}.log".format(envname)), "a") + log_suffix = _get_proxy_log_suffix() + log = open(os.path.join(root, f"_proxy_log_{log_suffix}.log"), "a") - os.environ["PROXY_ASSETS_FOLDER"] = os.path.join(root, "l", envname) - if not os.path.exists(os.environ["PROXY_ASSETS_FOLDER"]): - os.makedirs(os.environ["PROXY_ASSETS_FOLDER"]) + # os.environ["PROXY_ASSETS_FOLDER"] = os.path.join(root, "l", log_suffix) + # if not os.path.exists(os.environ["PROXY_ASSETS_FOLDER"]): + # os.makedirs(os.environ["PROXY_ASSETS_FOLDER"]) - if os.getenv("TF_BUILD"): - _LOGGER.info("Starting the test proxy tool from dotnet tool cache...") - tool_name = "test-proxy" - else: - _LOGGER.info("Downloading and starting standalone proxy executable...") - tool_name = prepare_local_tool(root) + tool_name = prepare_local_tool(root) if requires_https: # Always start the proxy with these two defaults set to allow SSL connection @@ -387,6 +405,12 @@ def start_test_proxy(request) -> None: else: passenv = {} + # When the proxy is started in context of a file, deletions of files under that directory crashes the test-proxy + # due to how asp.net kestrel loads configuration files. We can disable this behavior by setting this environment + # variable in env for the proxy process, which will allow us to clean up the --isolate directories without crashing + # running proxies. 
+ passenv["DOTNET_HOSTBUILDER__RELOADCONFIGONCHANGE"] = "false" + # If they are already set, override what we give the proxy with what is in os.environ passenv.update(os.environ) From 39f30a376a8a3ad70046a3eba4f8c94137c21163 Mon Sep 17 00:00:00 2001 From: Scott Beddall Date: Wed, 28 Jan 2026 19:20:10 +0000 Subject: [PATCH 52/76] fix formatting. fix error with azure-template attempting to restore a nonexistent tag. --- eng/scripts/dispatch_checks.py | 16 ++++++++-------- eng/tools/azure-sdk-tools/azpysdk/proxy_ports.py | 1 + 2 files changed, 9 insertions(+), 8 deletions(-) diff --git a/eng/scripts/dispatch_checks.py b/eng/scripts/dispatch_checks.py index 6b9458b2fc0e..ca52d93d5ae4 100644 --- a/eng/scripts/dispatch_checks.py +++ b/eng/scripts/dispatch_checks.py @@ -46,6 +46,7 @@ class ProxyProcess: # Checks implemented via InstallAndTest all require shared recording restore behavior. INSTALL_AND_TEST_CHECKS = {"whl", "whl_no_aio", "sdist", "devtest", "optional", "latestdependency", "mindependency"} + def _cleanup_isolate_dirs() -> None: if not ISOLATE_DIRS_TO_CLEAN: return @@ -70,7 +71,8 @@ def _checks_require_recording_restore(checks: List[str]) -> bool: def _restore_package_recordings(packages: List[str], proxy_executable: str) -> None: - unique_packages = list(dict.fromkeys(packages)) + # azure template has a fake tag for demonstration purposes, skip restore on that one. + unique_packages = [package for package in list(dict.fromkeys(packages)) if package != "azure-template"] for package in unique_packages: assets_path = os.path.join(package, "assets.json") if not os.path.exists(assets_path): @@ -254,11 +256,7 @@ async def run_all_checks(packages, checks, max_parallel, wheel_dir): total = len(scheduled) for idx, (package, check, proxy_port) in enumerate(scheduled, start=1): - tasks.append( - asyncio.create_task( - run_check(semaphore, package, check, base_args, idx, total or 1, proxy_port) - ) - ) + tasks.append(asyncio.create_task(run_check(semaphore, package, check, base_args, idx, total or 1, proxy_port))) # Handle Ctrl+C gracefully pending = set(tasks) @@ -317,12 +315,14 @@ def handler(signum, frame): if __name__ == "__main__": - parser = argparse.ArgumentParser(description=""" + parser = argparse.ArgumentParser( + description=""" This script is the single point for all checks invoked by CI within this repo. It works in two phases. 1. Identify which packages in the repo are in scope for this script invocation, based on a glob string and a service directory. 2. Invoke one or multiple `checks` environments for each package identified as in scope. In the case of an environment invoking `pytest`, results can be collected in a junit xml file, and test markers can be selected via --mark_arg. -""") +""" + ) parser.add_argument( "glob_string", diff --git a/eng/tools/azure-sdk-tools/azpysdk/proxy_ports.py b/eng/tools/azure-sdk-tools/azpysdk/proxy_ports.py index 3eaa4dddec7c..59f0c9777a01 100644 --- a/eng/tools/azure-sdk-tools/azpysdk/proxy_ports.py +++ b/eng/tools/azure-sdk-tools/azpysdk/proxy_ports.py @@ -7,6 +7,7 @@ launcher to share the same source of truth without having to parse the tox configuration file at runtime. 
""" + from __future__ import annotations from typing import Dict, Optional From 881f1abd1a0b33dec6b06671973bdacefa151750 Mon Sep 17 00:00:00 2001 From: Scott Beddall Date: Wed, 28 Jan 2026 19:40:48 +0000 Subject: [PATCH 53/76] a'zure-sdk-tools' doesn't have a build config yet --- scripts/automation_init.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/automation_init.sh b/scripts/automation_init.sh index 308bcf24aee4..6d444c097e1c 100644 --- a/scripts/automation_init.sh +++ b/scripts/automation_init.sh @@ -2,7 +2,7 @@ # init env python -m pip install -U pip > /dev/null -python -m pip install eng/tools/azure-sdk-tools[build,ghtools,sdkgenerator] > /dev/null +python -m pip install eng/tools/azure-sdk-tools[ghtools,sdkgenerator] > /dev/null # install tsp-client echo Install tsp-client From 272b61bb53915ba736d2470c8eb19e4d44ebdc5a Mon Sep 17 00:00:00 2001 From: Scott Beddall Date: Wed, 28 Jan 2026 19:42:52 +0000 Subject: [PATCH 54/76] azure-template wasn't being excluded from restore properly --- eng/scripts/dispatch_checks.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/eng/scripts/dispatch_checks.py b/eng/scripts/dispatch_checks.py index ca52d93d5ae4..8d9a0c054c75 100644 --- a/eng/scripts/dispatch_checks.py +++ b/eng/scripts/dispatch_checks.py @@ -72,7 +72,8 @@ def _checks_require_recording_restore(checks: List[str]) -> bool: def _restore_package_recordings(packages: List[str], proxy_executable: str) -> None: # azure template has a fake tag for demonstration purposes, skip restore on that one. - unique_packages = [package for package in list(dict.fromkeys(packages)) if package != "azure-template"] + unique_packages = [package for package in list(dict.fromkeys(packages)) if "azure-template" not in package] + for package in unique_packages: assets_path = os.path.join(package, "assets.json") if not os.path.exists(assets_path): From 577eeac74adbcdb7d7930f381d48974346010253 Mon Sep 17 00:00:00 2001 From: Scott Beddall Date: Thu, 29 Jan 2026 00:23:07 +0000 Subject: [PATCH 55/76] lets try this --- eng/scripts/dispatch_checks.py | 43 +++++++++++++++++-- .../azure-sdk-tools/ci_tools/variables.py | 15 +++++++ .../devtools_testutils/proxy_startup.py | 15 ++++--- 3 files changed, 62 insertions(+), 11 deletions(-) diff --git a/eng/scripts/dispatch_checks.py b/eng/scripts/dispatch_checks.py index 8d9a0c054c75..440c113fbe31 100644 --- a/eng/scripts/dispatch_checks.py +++ b/eng/scripts/dispatch_checks.py @@ -13,7 +13,7 @@ from azpysdk.proxy_ports import get_proxy_port_for_check from ci_tools.functions import discover_targeted_packages -from ci_tools.variables import in_ci +from ci_tools.variables import in_ci, get_assets_directory from ci_tools.scenario.generation import build_whl_for_req, replace_dev_reqs from ci_tools.logging import configure_logging, logger from ci_tools.environment_exclusions import is_check_enabled, CHECK_DEFAULTS @@ -45,6 +45,7 @@ class ProxyProcess: BASE_PROXY_PORT = 5000 # Checks implemented via InstallAndTest all require shared recording restore behavior. 
INSTALL_AND_TEST_CHECKS = {"whl", "whl_no_aio", "sdist", "devtest", "optional", "latestdependency", "mindependency"} +SHARED_RESTORE_ENV = "__shared_restore__" def _cleanup_isolate_dirs() -> None: @@ -70,7 +71,9 @@ def _checks_require_recording_restore(checks: List[str]) -> bool: return any(check in INSTALL_AND_TEST_CHECKS for check in checks) -def _restore_package_recordings(packages: List[str], proxy_executable: str) -> None: +def _restore_package_recordings( + packages: List[str], proxy_executable: str, assets_dir: Optional[str] = None +) -> None: # azure template has a fake tag for demonstration purposes, skip restore on that one. unique_packages = [package for package in list(dict.fromkeys(packages)) if "azure-template" not in package] @@ -82,7 +85,10 @@ def _restore_package_recordings(packages: List[str], proxy_executable: str) -> N cmd = [proxy_executable, "restore", "-a", assets_path] logger.info(f"Restoring recordings for {package} using assets file {assets_path}.") try: - completed = subprocess.run(cmd, capture_output=True, text=True, check=True) + env = os.environ.copy() + if assets_dir: + env["PROXY_ASSETS_FOLDER"] = assets_dir + completed = subprocess.run(cmd, capture_output=True, text=True, check=True, env=env) if completed.stdout: logger.debug(completed.stdout.strip()) if completed.stderr: @@ -96,6 +102,31 @@ def _restore_package_recordings(packages: List[str], proxy_executable: str) -> N raise +def _prepopulate_check_asset_dirs(base_assets_dir: str, checks: List[str]) -> None: + if not in_ci(): + return + + if not base_assets_dir or not os.path.exists(base_assets_dir): + logger.warning( + f"Base assets directory {base_assets_dir} missing; skipping per-check asset propagation." + ) + return + + unique_checks = sorted( + {check.strip() for check in checks if check and check.strip() and check in INSTALL_AND_TEST_CHECKS} + ) + for check in unique_checks: + destination = get_assets_directory(root_dir, check) + if os.path.abspath(destination) == os.path.abspath(base_assets_dir): + continue + + if os.path.exists(destination): + shutil.rmtree(destination, ignore_errors=True) + + logger.info(f"Prepopulating assets for check '{check}' in {destination}.") + shutil.copytree(base_assets_dir, destination, dirs_exist_ok=True) + + async def run_check( semaphore: asyncio.Semaphore, package: str, @@ -130,6 +161,7 @@ async def run_check( logger.info(f"[START {idx}/{total}] {check} :: {package}\nCMD: {' '.join(cmd)}") env = os.environ.copy() env["PROXY_URL"] = f"http://localhost:{proxy_port}" + env["CHECK_ENV"] = check try: proc = await asyncio.create_subprocess_exec( *cmd, @@ -474,12 +506,15 @@ def handler(signum, frame): logger.error(f"Unable to prepare test proxy executable for recording restore: {exc}") sys.exit(1) + base_assets_dir = get_assets_directory(root_dir, SHARED_RESTORE_ENV) try: - _restore_package_recordings(targeted_packages, proxy_executable) + _restore_package_recordings(targeted_packages, proxy_executable, base_assets_dir) except subprocess.CalledProcessError: logger.error("Recording restore pre-pass failed; aborting check execution.") sys.exit(1) + _prepopulate_check_asset_dirs(base_assets_dir, checks) + logger.info( f"Running {len(checks)} check(s) across {len(targeted_packages)} packages (max_parallel={args.max_parallel})." 
) diff --git a/eng/tools/azure-sdk-tools/ci_tools/variables.py b/eng/tools/azure-sdk-tools/ci_tools/variables.py index e8f03f216377..7d3fb4b620fa 100644 --- a/eng/tools/azure-sdk-tools/ci_tools/variables.py +++ b/eng/tools/azure-sdk-tools/ci_tools/variables.py @@ -131,3 +131,18 @@ def set_envvar_defaults(settings: Optional[Dict[str, str]] = None) -> None: if settings: # this will override any defaults set prior in the case of override set_environment_from_dictionary(settings) + +def get_assets_directory(root: str, env_name: str) -> str: + """ + Resolves the location of the assets directory for test proxy recordings, given a root directory. + Outside of CI, this will end up being the root directory itself. + """ + if in_ci(): + dir = os.path.join(root, "l", env_name) + else: + return root + + if not os.path.exists(dir): + os.makedirs(dir) + + return dir \ No newline at end of file diff --git a/eng/tools/azure-sdk-tools/devtools_testutils/proxy_startup.py b/eng/tools/azure-sdk-tools/devtools_testutils/proxy_startup.py index 72a308601f81..e72191066f60 100644 --- a/eng/tools/azure-sdk-tools/devtools_testutils/proxy_startup.py +++ b/eng/tools/azure-sdk-tools/devtools_testutils/proxy_startup.py @@ -22,7 +22,7 @@ from urllib.parse import urlparse from urllib3.exceptions import SSLError -from ci_tools.variables import in_ci +from ci_tools.variables import in_ci, get_assets_directory from .config import PROXY_URL from .fake_credentials import FAKE_ACCESS_TOKEN, FAKE_ID, SERVICEBUS_FAKE_SAS, SANITIZED @@ -388,10 +388,6 @@ def start_test_proxy(request) -> None: log_suffix = _get_proxy_log_suffix() log = open(os.path.join(root, f"_proxy_log_{log_suffix}.log"), "a") - # os.environ["PROXY_ASSETS_FOLDER"] = os.path.join(root, "l", log_suffix) - # if not os.path.exists(os.environ["PROXY_ASSETS_FOLDER"]): - # os.makedirs(os.environ["PROXY_ASSETS_FOLDER"]) - tool_name = prepare_local_tool(root) if requires_https: @@ -405,17 +401,22 @@ def start_test_proxy(request) -> None: else: passenv = {} - # When the proxy is started in context of a file, deletions of files under that directory crashes the test-proxy + # When the proxy is started in context of a directory, deletions of files under that directory crashes the test-proxy # due to how asp.net kestrel loads configuration files. We can disable this behavior by setting this environment # variable in env for the proxy process, which will allow us to clean up the --isolate directories without crashing # running proxies. passenv["DOTNET_HOSTBUILDER__RELOADCONFIGONCHANGE"] = "false" + # in CI, when multiple environments are running in parallel, we need to isolate the storage locations + # such that each proxy environment has its own storage location. 
This will prevent race conditions where the same + # check is accessing the same file storage as another parallel check + start_location = get_assets_directory(root, os.getenv("CHECK_ENV", "")) + # If they are already set, override what we give the proxy with what is in os.environ passenv.update(os.environ) proc = subprocess.Popen( - shlex.split(f'{tool_name} start --storage-location="{root}" -- --urls "{PROXY_URL}"'), + shlex.split(f'{tool_name} start --storage-location="{start_location}" -- --urls "{PROXY_URL}"'), stdout=log or subprocess.DEVNULL, stderr=log or subprocess.STDOUT, env=passenv, From 141aa3a0963820c66481960b5d427a6307829e94 Mon Sep 17 00:00:00 2001 From: Scott Beddall Date: Thu, 29 Jan 2026 02:03:46 +0000 Subject: [PATCH 56/76] changes to proxy startup --- .../azure-sdk-tools/devtools_testutils/proxy_startup.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/eng/tools/azure-sdk-tools/devtools_testutils/proxy_startup.py b/eng/tools/azure-sdk-tools/devtools_testutils/proxy_startup.py index e72191066f60..cf9f64c4b9dd 100644 --- a/eng/tools/azure-sdk-tools/devtools_testutils/proxy_startup.py +++ b/eng/tools/azure-sdk-tools/devtools_testutils/proxy_startup.py @@ -410,13 +410,14 @@ def start_test_proxy(request) -> None: # in CI, when multiple environments are running in parallel, we need to isolate the storage locations # such that each proxy environment has its own storage location. This will prevent race conditions where the same # check is accessing the same file storage as another parallel check - start_location = get_assets_directory(root, os.getenv("CHECK_ENV", "")) + assets_location = get_assets_directory(root, os.getenv("CHECK_ENV", "")) + passenv["PROXY_ASSETS_LOCATION"] = assets_location # If they are already set, override what we give the proxy with what is in os.environ passenv.update(os.environ) proc = subprocess.Popen( - shlex.split(f'{tool_name} start --storage-location="{start_location}" -- --urls "{PROXY_URL}"'), + shlex.split(f'{tool_name} start --storage-location="{root}" -- --urls "{PROXY_URL}"'), stdout=log or subprocess.DEVNULL, stderr=log or subprocess.STDOUT, env=passenv, From 9e2dcfb060965630782f31834c1917c50542ce46 Mon Sep 17 00:00:00 2001 From: Scott Beddall Date: Fri, 30 Jan 2026 21:55:53 +0000 Subject: [PATCH 57/76] opportunity for precache, but it's not technically necessary --- .gitignore | 1 + eng/scripts/dispatch_checks.py | 29 +++++++++---------- .../azure-sdk-tools/ci_tools/variables.py | 15 ---------- .../devtools_testutils/proxy_startup.py | 8 +---- 4 files changed, 16 insertions(+), 37 deletions(-) diff --git a/.gitignore b/.gitignore index 84bf07aea48c..4b64cafe2e1f 100644 --- a/.gitignore +++ b/.gitignore @@ -1,5 +1,6 @@ # Default Assets restore directory .assets +.assets_distributed # Python cache __pycache__/ diff --git a/eng/scripts/dispatch_checks.py b/eng/scripts/dispatch_checks.py index 440c113fbe31..a9b5d3e3df81 100644 --- a/eng/scripts/dispatch_checks.py +++ b/eng/scripts/dispatch_checks.py @@ -13,7 +13,7 @@ from azpysdk.proxy_ports import get_proxy_port_for_check from ci_tools.functions import discover_targeted_packages -from ci_tools.variables import in_ci, get_assets_directory +from ci_tools.variables import in_ci from ci_tools.scenario.generation import build_whl_for_req, replace_dev_reqs from ci_tools.logging import configure_logging, logger from ci_tools.environment_exclusions import is_check_enabled, CHECK_DEFAULTS @@ -71,9 +71,7 @@ def _checks_require_recording_restore(checks: List[str]) -> bool: 
return any(check in INSTALL_AND_TEST_CHECKS for check in checks) -def _restore_package_recordings( - packages: List[str], proxy_executable: str, assets_dir: Optional[str] = None -) -> None: +def _restore_package_recordings(packages: List[str], proxy_executable: str, assets_dir: Optional[str] = None) -> None: # azure template has a fake tag for demonstration purposes, skip restore on that one. unique_packages = [package for package in list(dict.fromkeys(packages)) if "azure-template" not in package] @@ -107,9 +105,7 @@ def _prepopulate_check_asset_dirs(base_assets_dir: str, checks: List[str]) -> No return if not base_assets_dir or not os.path.exists(base_assets_dir): - logger.warning( - f"Base assets directory {base_assets_dir} missing; skipping per-check asset propagation." - ) + logger.warning(f"Base assets directory {base_assets_dir} missing; skipping per-check asset propagation.") return unique_checks = sorted( @@ -161,7 +157,9 @@ async def run_check( logger.info(f"[START {idx}/{total}] {check} :: {package}\nCMD: {' '.join(cmd)}") env = os.environ.copy() env["PROXY_URL"] = f"http://localhost:{proxy_port}" - env["CHECK_ENV"] = check + + if in_ci(): + env["PROXY_ASSETS_FOLDER"] = os.path.join(root_dir, ".assets_distributed", str(proxy_port)) try: proc = await asyncio.create_subprocess_exec( *cmd, @@ -499,6 +497,7 @@ def handler(signum, frame): logger.error("No valid checks provided via -c/--checks.") sys.exit(2) + # ensure that the proxy exe is available before we start running checks that may need to populate it if in_ci() and _checks_require_recording_restore(checks): try: proxy_executable = prepare_local_tool(root_dir) @@ -506,14 +505,14 @@ def handler(signum, frame): logger.error(f"Unable to prepare test proxy executable for recording restore: {exc}") sys.exit(1) - base_assets_dir = get_assets_directory(root_dir, SHARED_RESTORE_ENV) - try: - _restore_package_recordings(targeted_packages, proxy_executable, base_assets_dir) - except subprocess.CalledProcessError: - logger.error("Recording restore pre-pass failed; aborting check execution.") - sys.exit(1) + # base_assets_dir = get_assets_directory(root_dir, SHARED_RESTORE_ENV) + # try: + # _restore_package_recordings(targeted_packages, proxy_executable, base_assets_dir) + # except subprocess.CalledProcessError: + # logger.error("Recording restore pre-pass failed; aborting check execution.") + # sys.exit(1) - _prepopulate_check_asset_dirs(base_assets_dir, checks) + # _prepopulate_check_asset_dirs(base_assets_dir, checks) logger.info( f"Running {len(checks)} check(s) across {len(targeted_packages)} packages (max_parallel={args.max_parallel})." diff --git a/eng/tools/azure-sdk-tools/ci_tools/variables.py b/eng/tools/azure-sdk-tools/ci_tools/variables.py index 7d3fb4b620fa..e8f03f216377 100644 --- a/eng/tools/azure-sdk-tools/ci_tools/variables.py +++ b/eng/tools/azure-sdk-tools/ci_tools/variables.py @@ -131,18 +131,3 @@ def set_envvar_defaults(settings: Optional[Dict[str, str]] = None) -> None: if settings: # this will override any defaults set prior in the case of override set_environment_from_dictionary(settings) - -def get_assets_directory(root: str, env_name: str) -> str: - """ - Resolves the location of the assets directory for test proxy recordings, given a root directory. - Outside of CI, this will end up being the root directory itself. 
- """ - if in_ci(): - dir = os.path.join(root, "l", env_name) - else: - return root - - if not os.path.exists(dir): - os.makedirs(dir) - - return dir \ No newline at end of file diff --git a/eng/tools/azure-sdk-tools/devtools_testutils/proxy_startup.py b/eng/tools/azure-sdk-tools/devtools_testutils/proxy_startup.py index cf9f64c4b9dd..edc982a09502 100644 --- a/eng/tools/azure-sdk-tools/devtools_testutils/proxy_startup.py +++ b/eng/tools/azure-sdk-tools/devtools_testutils/proxy_startup.py @@ -22,7 +22,7 @@ from urllib.parse import urlparse from urllib3.exceptions import SSLError -from ci_tools.variables import in_ci, get_assets_directory +from ci_tools.variables import in_ci # from .config import PROXY_URL from .fake_credentials import FAKE_ACCESS_TOKEN, FAKE_ID, SERVICEBUS_FAKE_SAS, SANITIZED @@ -407,12 +407,6 @@ def start_test_proxy(request) -> None: # running proxies. passenv["DOTNET_HOSTBUILDER__RELOADCONFIGONCHANGE"] = "false" - # in CI, when multiple environments are running in parallel, we need to isolate the storage locations - # such that each proxy environment has its own storage location. This will prevent race conditions where the same - # check is accessing the same file storage as another parallel check - assets_location = get_assets_directory(root, os.getenv("CHECK_ENV", "")) - passenv["PROXY_ASSETS_LOCATION"] = assets_location - # If they are already set, override what we give the proxy with what is in os.environ passenv.update(os.environ) From c47086aaf69c9e3c22469b4cd2aa1bcad7de4209 Mon Sep 17 00:00:00 2001 From: Scott Beddall Date: Sat, 31 Jan 2026 00:21:11 +0000 Subject: [PATCH 58/76] actually write to different ports. this should eliminate the conflicts --- eng/scripts/dispatch_checks.py | 63 +--------------------- eng/tools/azure-sdk-tools/azpysdk/Check.py | 11 +++- 2 files changed, 11 insertions(+), 63 deletions(-) diff --git a/eng/scripts/dispatch_checks.py b/eng/scripts/dispatch_checks.py index a9b5d3e3df81..d9841db4cd02 100644 --- a/eng/scripts/dispatch_checks.py +++ b/eng/scripts/dispatch_checks.py @@ -42,7 +42,7 @@ class ProxyProcess: PROXY_STATUS_SUFFIX = "/Info/Available" PROXY_STARTUP_TIMEOUT = 60 -BASE_PROXY_PORT = 5000 +BASE_PROXY_PORT = 5050 # Checks implemented via InstallAndTest all require shared recording restore behavior. INSTALL_AND_TEST_CHECKS = {"whl", "whl_no_aio", "sdist", "devtest", "optional", "latestdependency", "mindependency"} SHARED_RESTORE_ENV = "__shared_restore__" @@ -71,58 +71,6 @@ def _checks_require_recording_restore(checks: List[str]) -> bool: return any(check in INSTALL_AND_TEST_CHECKS for check in checks) -def _restore_package_recordings(packages: List[str], proxy_executable: str, assets_dir: Optional[str] = None) -> None: - # azure template has a fake tag for demonstration purposes, skip restore on that one. 
- unique_packages = [package for package in list(dict.fromkeys(packages)) if "azure-template" not in package] - - for package in unique_packages: - assets_path = os.path.join(package, "assets.json") - if not os.path.exists(assets_path): - logger.debug(f"No assets.json found under {package}; skipping restore.") - continue - cmd = [proxy_executable, "restore", "-a", assets_path] - logger.info(f"Restoring recordings for {package} using assets file {assets_path}.") - try: - env = os.environ.copy() - if assets_dir: - env["PROXY_ASSETS_FOLDER"] = assets_dir - completed = subprocess.run(cmd, capture_output=True, text=True, check=True, env=env) - if completed.stdout: - logger.debug(completed.stdout.strip()) - if completed.stderr: - logger.debug(completed.stderr.strip()) - except subprocess.CalledProcessError as exc: - logger.error(f"Failed to restore recordings for {package}: {exc}") - if exc.stdout: - logger.error(exc.stdout.strip()) - if exc.stderr: - logger.error(exc.stderr.strip()) - raise - - -def _prepopulate_check_asset_dirs(base_assets_dir: str, checks: List[str]) -> None: - if not in_ci(): - return - - if not base_assets_dir or not os.path.exists(base_assets_dir): - logger.warning(f"Base assets directory {base_assets_dir} missing; skipping per-check asset propagation.") - return - - unique_checks = sorted( - {check.strip() for check in checks if check and check.strip() and check in INSTALL_AND_TEST_CHECKS} - ) - for check in unique_checks: - destination = get_assets_directory(root_dir, check) - if os.path.abspath(destination) == os.path.abspath(base_assets_dir): - continue - - if os.path.exists(destination): - shutil.rmtree(destination, ignore_errors=True) - - logger.info(f"Prepopulating assets for check '{check}' in {destination}.") - shutil.copytree(base_assets_dir, destination, dirs_exist_ok=True) - - async def run_check( semaphore: asyncio.Semaphore, package: str, @@ -505,15 +453,6 @@ def handler(signum, frame): logger.error(f"Unable to prepare test proxy executable for recording restore: {exc}") sys.exit(1) - # base_assets_dir = get_assets_directory(root_dir, SHARED_RESTORE_ENV) - # try: - # _restore_package_recordings(targeted_packages, proxy_executable, base_assets_dir) - # except subprocess.CalledProcessError: - # logger.error("Recording restore pre-pass failed; aborting check execution.") - # sys.exit(1) - - # _prepopulate_check_asset_dirs(base_assets_dir, checks) - logger.info( f"Running {len(checks)} check(s) across {len(targeted_packages)} packages (max_parallel={args.max_parallel})." 
) diff --git a/eng/tools/azure-sdk-tools/azpysdk/Check.py b/eng/tools/azure-sdk-tools/azpysdk/Check.py index ebf89138e66b..29386ff5df36 100644 --- a/eng/tools/azure-sdk-tools/azpysdk/Check.py +++ b/eng/tools/azure-sdk-tools/azpysdk/Check.py @@ -112,7 +112,16 @@ def create_venv(self, isolate: bool, venv_location: str) -> str: def get_executable(self, isolate: bool, check_name: str, executable: str, package_folder: str) -> Tuple[str, str]: """Get the Python executable that should be used for this check.""" proxy_url = get_proxy_url_for_check(check_name) - os.environ["PROXY_URL"] = proxy_url + if not os.getenv("PROXY_URL"): + os.environ["PROXY_URL"] = proxy_url + else: + current_url = os.getenv("PROXY_URL") + if current_url != proxy_url: + logger.debug( + "PROXY_URL already set to %s, keeping existing assignment instead of %s", + current_url, + proxy_url, + ) # Keep venvs under a shared repo-level folder to prevent nested import errors during pytest collection package_name = os.path.basename(os.path.normpath(package_folder)) shared_venv_root = os.path.join(REPO_ROOT, ".venv", package_name) From 9decc41308e3211cb5036393a2155f23663187ec Mon Sep 17 00:00:00 2001 From: Scott Beddall Date: Sat, 31 Jan 2026 02:04:06 +0000 Subject: [PATCH 59/76] the port we're passing into the process is not being honored --- eng/scripts/dispatch_checks.py | 1 + eng/tools/azure-sdk-tools/azpysdk/Check.py | 12 ------------ .../azure-sdk-tools/azpysdk/install_and_test.py | 3 +++ .../azure-sdk-tools/devtools_testutils/config.py | 3 ++- .../devtools_testutils/proxy_startup.py | 8 ++++---- .../devtools_testutils/proxy_testcase.py | 11 +++++------ .../devtools_testutils/sanitizers.py | 14 +++++++------- 7 files changed, 22 insertions(+), 30 deletions(-) diff --git a/eng/scripts/dispatch_checks.py b/eng/scripts/dispatch_checks.py index d9841db4cd02..386fbb8c4c75 100644 --- a/eng/scripts/dispatch_checks.py +++ b/eng/scripts/dispatch_checks.py @@ -229,6 +229,7 @@ async def run_all_checks(packages, checks, max_parallel, wheel_dir): if not is_check_enabled(package, check, CHECK_DEFAULTS.get(check, True)): logger.warning(f"Skipping disabled check {check} for package {package}") continue + logger.info(f"Assigning proxy port {next_proxy_port} to check {check} for package {package}") scheduled.append((package, check, next_proxy_port)) next_proxy_port += 1 diff --git a/eng/tools/azure-sdk-tools/azpysdk/Check.py b/eng/tools/azure-sdk-tools/azpysdk/Check.py index 29386ff5df36..cbc122dfca8f 100644 --- a/eng/tools/azure-sdk-tools/azpysdk/Check.py +++ b/eng/tools/azure-sdk-tools/azpysdk/Check.py @@ -21,7 +21,6 @@ ) from ci_tools.variables import discover_repo_root, in_ci from ci_tools.logging import logger -from .proxy_ports import get_proxy_url_for_check # right now, we are assuming you HAVE to be in the azure-sdk-tools repo # we assume this because we don't know how a dev has installed this package, and might be @@ -111,17 +110,6 @@ def create_venv(self, isolate: bool, venv_location: str) -> str: def get_executable(self, isolate: bool, check_name: str, executable: str, package_folder: str) -> Tuple[str, str]: """Get the Python executable that should be used for this check.""" - proxy_url = get_proxy_url_for_check(check_name) - if not os.getenv("PROXY_URL"): - os.environ["PROXY_URL"] = proxy_url - else: - current_url = os.getenv("PROXY_URL") - if current_url != proxy_url: - logger.debug( - "PROXY_URL already set to %s, keeping existing assignment instead of %s", - current_url, - proxy_url, - ) # Keep venvs under a shared repo-level 
folder to prevent nested import errors during pytest collection package_name = os.path.basename(os.path.normpath(package_folder)) shared_venv_root = os.path.join(REPO_ROOT, ".venv", package_name) diff --git a/eng/tools/azure-sdk-tools/azpysdk/install_and_test.py b/eng/tools/azure-sdk-tools/azpysdk/install_and_test.py index a1f496755fa8..dd0cc949c7ef 100644 --- a/eng/tools/azure-sdk-tools/azpysdk/install_and_test.py +++ b/eng/tools/azure-sdk-tools/azpysdk/install_and_test.py @@ -169,6 +169,9 @@ def install_all_requirements( def get_env_defaults(self) -> Dict[str, str]: defaults: Dict[str, str] = {} + + if os.getenv("PROXY_URL") is not None: + defaults["PROXY_URL"] = str(os.getenv("PROXY_URL")) if self.proxy_url: defaults["PROXY_URL"] = self.proxy_url return defaults diff --git a/eng/tools/azure-sdk-tools/devtools_testutils/config.py b/eng/tools/azure-sdk-tools/devtools_testutils/config.py index af127405b9f3..9bc15d89cae0 100644 --- a/eng/tools/azure-sdk-tools/devtools_testutils/config.py +++ b/eng/tools/azure-sdk-tools/devtools_testutils/config.py @@ -12,9 +12,10 @@ ENV_LIVE_TEST = "AZURE_TEST_RUN_LIVE" -PROXY_URL = os.getenv("PROXY_URL", "http://localhost:5000").rstrip("/") TEST_SETTING_FILENAME = "testsettings_local.cfg" +def PROXY_URL(): + return os.getenv("PROXY_URL", "http://localhost:5000").rstrip("/") class TestConfig(object): # pylint: disable=too-few-public-methods def __init__(self, parent_parsers=None, config_file=None): diff --git a/eng/tools/azure-sdk-tools/devtools_testutils/proxy_startup.py b/eng/tools/azure-sdk-tools/devtools_testutils/proxy_startup.py index edc982a09502..242d99d215a0 100644 --- a/eng/tools/azure-sdk-tools/devtools_testutils/proxy_startup.py +++ b/eng/tools/azure-sdk-tools/devtools_testutils/proxy_startup.py @@ -45,7 +45,7 @@ CONTAINER_STARTUP_TIMEOUT = 60 PROXY_MANUALLY_STARTED = os.getenv("PROXY_MANUAL_START", False) -PROXY_CHECK_URL = PROXY_URL + "/Info/Available" +PROXY_CHECK_URL = PROXY_URL() + "/Info/Available" TOOL_ENV_VAR = "PROXY_PID" AVAILABLE_TEST_PROXY_BINARIES = { @@ -101,7 +101,7 @@ def _get_proxy_log_suffix() -> str: """Derive a log suffix based on the configured proxy port.""" - proxy_url = os.getenv("PROXY_URL", PROXY_URL) + proxy_url = os.getenv("PROXY_URL", PROXY_URL()) normalized = proxy_url if "://" in proxy_url else f"http://{proxy_url}" try: parsed = urlparse(normalized) @@ -370,7 +370,7 @@ def start_test_proxy(request) -> None: """ repo_root = ascend_to_root(request.node.items[0].module.__file__) - requires_https = PROXY_URL.startswith("https://") + requires_https = PROXY_URL().startswith("https://") if requires_https: check_certificate_location(repo_root) @@ -411,7 +411,7 @@ def start_test_proxy(request) -> None: passenv.update(os.environ) proc = subprocess.Popen( - shlex.split(f'{tool_name} start --storage-location="{root}" -- --urls "{PROXY_URL}"'), + shlex.split(f'{tool_name} start --storage-location="{root}" -- --urls "{PROXY_URL()}"'), stdout=log or subprocess.DEVNULL, stderr=log or subprocess.STDOUT, env=passenv, diff --git a/eng/tools/azure-sdk-tools/devtools_testutils/proxy_testcase.py b/eng/tools/azure-sdk-tools/devtools_testutils/proxy_testcase.py index 2e31916d9102..9a6fbf51c72f 100644 --- a/eng/tools/azure-sdk-tools/devtools_testutils/proxy_testcase.py +++ b/eng/tools/azure-sdk-tools/devtools_testutils/proxy_testcase.py @@ -46,11 +46,10 @@ # defaults -RECORDING_START_URL = "{}/record/start".format(PROXY_URL) -RECORDING_STOP_URL = "{}/record/stop".format(PROXY_URL) -PLAYBACK_START_URL = 
"{}/playback/start".format(PROXY_URL) -PLAYBACK_STOP_URL = "{}/playback/stop".format(PROXY_URL) - +RECORDING_START_URL = "{}/record/start".format(PROXY_URL()) +RECORDING_STOP_URL = "{}/record/stop".format(PROXY_URL()) +PLAYBACK_START_URL = "{}/playback/start".format(PROXY_URL()) +PLAYBACK_STOP_URL = "{}/playback/stop".format(PROXY_URL()) class RecordedTransport(str, Enum): """Enum for specifying which transports to record in the test proxy.""" @@ -159,7 +158,7 @@ def stop_record_or_playback(test_id: str, recording_id: str, test_variables: "Di def get_proxy_netloc() -> "Dict[str, str]": - parsed_result = url_parse.urlparse(PROXY_URL) + parsed_result = url_parse.urlparse(PROXY_URL()) return {"scheme": parsed_result.scheme, "netloc": parsed_result.netloc} diff --git a/eng/tools/azure-sdk-tools/devtools_testutils/sanitizers.py b/eng/tools/azure-sdk-tools/devtools_testutils/sanitizers.py index 83dbd7be7faa..7bdf6624dbc3 100644 --- a/eng/tools/azure-sdk-tools/devtools_testutils/sanitizers.py +++ b/eng/tools/azure-sdk-tools/devtools_testutils/sanitizers.py @@ -469,7 +469,7 @@ def add_batch_sanitizers(sanitizers: Dict[str, List[Optional[Dict[str, str]]]], http_client = get_http_client() http_client.request( method="POST", - url="{}/Admin/AddSanitizers".format(PROXY_URL), + url="{}/Admin/AddSanitizers".format(PROXY_URL()), headers=headers_to_send, body=json.dumps(data).encode("utf-8"), ) @@ -501,7 +501,7 @@ def remove_batch_sanitizers(sanitizers: List[str], headers: Optional[Dict] = Non http_client = get_http_client() http_client.request( method="POST", - url="{}/Admin/RemoveSanitizers".format(PROXY_URL), + url="{}/Admin/RemoveSanitizers".format(PROXY_URL()), headers=headers_to_send, body=json.dumps(data).encode("utf-8"), ) @@ -718,7 +718,7 @@ def _send_matcher_request(matcher: str, headers: Dict, parameters: Optional[Dict http_client = get_http_client() http_client.request( method="POST", - url=f"{PROXY_URL}/Admin/SetMatcher", + url=f"{PROXY_URL()}/Admin/SetMatcher", headers=headers_to_send, body=json.dumps(parameters).encode("utf-8"), ) @@ -747,7 +747,7 @@ def _send_recording_options_request(parameters: Dict, headers: Optional[Dict] = http_client = get_http_client() http_client.request( method="POST", - url=f"{PROXY_URL}/Admin/SetRecordingOptions", + url=f"{PROXY_URL()}/Admin/SetRecordingOptions", headers=headers_to_send, body=json.dumps(parameters).encode("utf-8"), ) @@ -771,7 +771,7 @@ def _send_reset_request(headers: Dict) -> None: headers_to_send[key] = headers[key] http_client = get_http_client() - http_client.request(method="POST", url=f"{PROXY_URL}/Admin/Reset", headers=headers_to_send) + http_client.request(method="POST", url=f"{PROXY_URL()}/Admin/Reset", headers=headers_to_send) def _send_sanitizer_request(sanitizer: str, parameters: Dict, headers: Optional[Dict] = None) -> None: @@ -796,7 +796,7 @@ def _send_sanitizer_request(sanitizer: str, parameters: Dict, headers: Optional[ http_client = get_http_client() http_client.request( method="POST", - url="{}/Admin/AddSanitizer".format(PROXY_URL), + url="{}/Admin/AddSanitizer".format(PROXY_URL()), headers=headers_to_send, body=json.dumps(parameters).encode("utf-8"), ) @@ -823,7 +823,7 @@ def _send_transform_request(transform: str, parameters: Dict, headers: Optional[ http_client = get_http_client() http_client.request( method="POST", - url=f"{PROXY_URL}/Admin/AddTransform", + url=f"{PROXY_URL()}/Admin/AddTransform", headers=headers_to_send, body=json.dumps(parameters).encode("utf-8"), ) From 933d2270236765a67dd4979d859aae33416541b2 Mon Sep 
17 00:00:00 2001 From: Scott Beddall Date: Sat, 31 Jan 2026 02:51:12 +0000 Subject: [PATCH 60/76] locally we're honoring all the port settings now --- eng/tools/azure-sdk-tools/ci_tools/variables.py | 1 - .../azure-sdk-tools/devtools_testutils/config.py | 13 +++++++++++++ .../devtools_testutils/proxy_startup.py | 3 +-- 3 files changed, 14 insertions(+), 3 deletions(-) diff --git a/eng/tools/azure-sdk-tools/ci_tools/variables.py b/eng/tools/azure-sdk-tools/ci_tools/variables.py index e8f03f216377..5732d3deb9f0 100644 --- a/eng/tools/azure-sdk-tools/ci_tools/variables.py +++ b/eng/tools/azure-sdk-tools/ci_tools/variables.py @@ -99,7 +99,6 @@ def in_analyze_weekly() -> int: DEFAULT_ENVIRONMENT_VARIABLES = { "SPHINX_APIDOC_OPTIONS": "members,undoc-members,inherited-members", - "PROXY_URL": "http://localhost:5000", "VIRTUALENV_WHEEL": "0.45.1", "VIRTUALENV_PIP": "24.0", "VIRTUALENV_SETUPTOOLS": "75.3.2", diff --git a/eng/tools/azure-sdk-tools/devtools_testutils/config.py b/eng/tools/azure-sdk-tools/devtools_testutils/config.py index 9bc15d89cae0..a6eb6a11f686 100644 --- a/eng/tools/azure-sdk-tools/devtools_testutils/config.py +++ b/eng/tools/azure-sdk-tools/devtools_testutils/config.py @@ -15,6 +15,19 @@ TEST_SETTING_FILENAME = "testsettings_local.cfg" def PROXY_URL(): + # If PROXY_ASSETS_FOLDER is set, extract the port from the last folder + proxy_assets_folder = os.getenv("PROXY_ASSETS_FOLDER") + if proxy_assets_folder: + # Remove trailing slashes and get the last path component + folder = proxy_assets_folder.rstrip("/").rstrip("\\") + port = os.path.basename(folder) + # Verify it's a valid port number + try: + int(port) + return f"http://localhost:{port}" + except ValueError: + pass # Not a valid port, fall through to default + return os.getenv("PROXY_URL", "http://localhost:5000").rstrip("/") class TestConfig(object): # pylint: disable=too-few-public-methods diff --git a/eng/tools/azure-sdk-tools/devtools_testutils/proxy_startup.py b/eng/tools/azure-sdk-tools/devtools_testutils/proxy_startup.py index 242d99d215a0..bf17b3d26708 100644 --- a/eng/tools/azure-sdk-tools/devtools_testutils/proxy_startup.py +++ b/eng/tools/azure-sdk-tools/devtools_testutils/proxy_startup.py @@ -100,8 +100,7 @@ def _get_proxy_log_suffix() -> str: """Derive a log suffix based on the configured proxy port.""" - - proxy_url = os.getenv("PROXY_URL", PROXY_URL()) + proxy_url = PROXY_URL() normalized = proxy_url if "://" in proxy_url else f"http://{proxy_url}" try: parsed = urlparse(normalized) From 689705f30bdb0c38d017e4cb68a1d4636cd625b2 Mon Sep 17 00:00:00 2001 From: Scott Beddall Date: Mon, 2 Feb 2026 20:22:10 +0000 Subject: [PATCH 61/76] remove wonky PROXY_ASSETS_FOLDER hack that was workaround-addressing the solved bug in install_and_test --- .../azure-sdk-tools/azpysdk/install_and_test.py | 4 +++- .../azure-sdk-tools/devtools_testutils/config.py | 13 ------------- 2 files changed, 3 insertions(+), 14 deletions(-) diff --git a/eng/tools/azure-sdk-tools/azpysdk/install_and_test.py b/eng/tools/azure-sdk-tools/azpysdk/install_and_test.py index dd0cc949c7ef..7e76a0903529 100644 --- a/eng/tools/azure-sdk-tools/azpysdk/install_and_test.py +++ b/eng/tools/azure-sdk-tools/azpysdk/install_and_test.py @@ -116,7 +116,9 @@ def run_pytest( cwd: Optional[str] = None, ) -> int: pytest_command = ["-m", "pytest", *pytest_args] - environment = {"PYTHONPYCACHEPREFIX": staging_directory} + + environment = os.environ.copy() + environment.update({"PYTHONPYCACHEPREFIX": staging_directory}) pytest_result = self.run_venv_command( 
executable, diff --git a/eng/tools/azure-sdk-tools/devtools_testutils/config.py b/eng/tools/azure-sdk-tools/devtools_testutils/config.py index a6eb6a11f686..9bc15d89cae0 100644 --- a/eng/tools/azure-sdk-tools/devtools_testutils/config.py +++ b/eng/tools/azure-sdk-tools/devtools_testutils/config.py @@ -15,19 +15,6 @@ TEST_SETTING_FILENAME = "testsettings_local.cfg" def PROXY_URL(): - # If PROXY_ASSETS_FOLDER is set, extract the port from the last folder - proxy_assets_folder = os.getenv("PROXY_ASSETS_FOLDER") - if proxy_assets_folder: - # Remove trailing slashes and get the last path component - folder = proxy_assets_folder.rstrip("/").rstrip("\\") - port = os.path.basename(folder) - # Verify it's a valid port number - try: - int(port) - return f"http://localhost:{port}" - except ValueError: - pass # Not a valid port, fall through to default - return os.getenv("PROXY_URL", "http://localhost:5000").rstrip("/") class TestConfig(object): # pylint: disable=too-few-public-methods From 6dd64b84dfee506ecfc47e0f84768e33dd0c216f Mon Sep 17 00:00:00 2001 From: Scott Beddall Date: Mon, 2 Feb 2026 22:45:02 +0000 Subject: [PATCH 62/76] now that we've moved the pycache directory out, we shouldn't run into clashing package versions anymore --- eng/tools/azure-sdk-tools/azpysdk/install_and_test.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/eng/tools/azure-sdk-tools/azpysdk/install_and_test.py b/eng/tools/azure-sdk-tools/azpysdk/install_and_test.py index 7e76a0903529..16b8769688a2 100644 --- a/eng/tools/azure-sdk-tools/azpysdk/install_and_test.py +++ b/eng/tools/azure-sdk-tools/azpysdk/install_and_test.py @@ -123,7 +123,7 @@ def run_pytest( pytest_result = self.run_venv_command( executable, pytest_command, - cwd=(cwd or staging_directory), + cwd=package_dir, immediately_dump=True, additional_environment_settings=environment, ) From 73c0f1c5268bd8fd408afec31747268f18dca23d Mon Sep 17 00:00:00 2001 From: Scott Beddall Date: Mon, 2 Feb 2026 22:55:42 +0000 Subject: [PATCH 63/76] add dep that is missing from dev_reqs --- sdk/ml/azure-ai-ml/dev_requirements.txt | 1 + 1 file changed, 1 insertion(+) diff --git a/sdk/ml/azure-ai-ml/dev_requirements.txt b/sdk/ml/azure-ai-ml/dev_requirements.txt index eedda6cf8025..fb3341648e00 100644 --- a/sdk/ml/azure-ai-ml/dev_requirements.txt +++ b/sdk/ml/azure-ai-ml/dev_requirements.txt @@ -23,3 +23,4 @@ pytest-reportlog python-dotenv azureml-dataprep-rslex>=2.22.0; platform_python_implementation == "CPython" and python_version < "3.13" azureml-dataprep-rslex>=2.22.0; platform_python_implementation == "PyPy" and python_version < "3.10" +pip From 39a6e0c39a4e50059109f8ce87e57410854af838 Mon Sep 17 00:00:00 2001 From: "Scott Beddall (from Dev Box)" Date: Wed, 4 Feb 2026 14:24:03 -0800 Subject: [PATCH 64/76] once a check has exited, the process will just _end_, we will no longer wait for sys.exit() which honors atexit() etc. 
cosmos has zombie processes and those are hanging the sys.exit() forever
---
 eng/tools/azure-sdk-tools/azpysdk/main.py | 7 ++++---
 1 file changed, 4 insertions(+), 3 deletions(-)

diff --git a/eng/tools/azure-sdk-tools/azpysdk/main.py b/eng/tools/azure-sdk-tools/azpysdk/main.py
index 7c6f060f905d..af8702e8931e 100644
--- a/eng/tools/azure-sdk-tools/azpysdk/main.py
+++ b/eng/tools/azure-sdk-tools/azpysdk/main.py
@@ -138,13 +138,14 @@ def main(argv: Optional[Sequence[str]] = None) -> int:

     try:
         result = args.func(args)
-        return int(result or 0)
+        exit_code = int(result or 0)
+        os._exit(exit_code)
     except KeyboardInterrupt:
         logger.error("Interrupted by user")
-        return 130
+        os._exit(130)
     except Exception as exc:  # pragma: no cover - simple top-level error handling
         logger.error(f"Error: {exc}")
-        return 2
+        os._exit(2)

 if __name__ == "__main__":

From 10ff3522be4fa6857cd445ef4e02b73490533935 Mon Sep 17 00:00:00 2001
From: "Scott Beddall (from Dev Box)"
Date: Wed, 4 Feb 2026 15:02:22 -0800
Subject: [PATCH 65/76] we were not forwarding the check value properly. THAT'S why we were exploding.

---
 eng/scripts/dispatch_checks.py | 15 ++++++++++++---
 eng/tools/azure-sdk-tools/azpysdk/devtest.py | 5 +++++
 .../azure-sdk-tools/azpysdk/install_and_test.py | 3 +++
 eng/tools/azure-sdk-tools/azpysdk/optional.py | 7 +++++++
 eng/tools/azure-sdk-tools/azpysdk/sdist.py | 5 +++++
 eng/tools/azure-sdk-tools/azpysdk/whl.py | 5 +++++
 eng/tools/azure-sdk-tools/azpysdk/whl_no_aio.py | 5 +++++
 7 files changed, 43 insertions(+), 4 deletions(-)

diff --git a/eng/scripts/dispatch_checks.py b/eng/scripts/dispatch_checks.py
index 386fbb8c4c75..f03ac1745609 100644
--- a/eng/scripts/dispatch_checks.py
+++ b/eng/scripts/dispatch_checks.py
@@ -79,6 +79,7 @@ async def run_check(
     idx: int,
     total: int,
     proxy_port: int,
+    mark_arg: Optional[str],
 ) -> CheckResult:
     """Run a single check (subprocess) within a concurrency semaphore, capturing output and timing.

     :param packages: Iterable of package paths to run checks against.
@@ -236,7 +239,11 @@ async def run_all_checks(packages, checks, max_parallel, wheel_dir): total = len(scheduled) for idx, (package, check, proxy_port) in enumerate(scheduled, start=1): - tasks.append(asyncio.create_task(run_check(semaphore, package, check, base_args, idx, total or 1, proxy_port))) + tasks.append( + asyncio.create_task( + run_check(semaphore, package, check, base_args, idx, total or 1, proxy_port, mark_arg) + ) + ) # Handle Ctrl+C gracefully pending = set(tasks) @@ -463,7 +470,9 @@ def handler(signum, frame): try: if in_ci(): logger.info(f"Ensuring {len(checks)} test proxies are running for requested checks...") - exit_code = asyncio.run(run_all_checks(targeted_packages, checks, args.max_parallel, temp_wheel_dir)) + exit_code = asyncio.run( + run_all_checks(targeted_packages, checks, args.max_parallel, temp_wheel_dir, args.mark_arg) + ) except KeyboardInterrupt: logger.error("Aborted by user.") exit_code = 130 diff --git a/eng/tools/azure-sdk-tools/azpysdk/devtest.py b/eng/tools/azure-sdk-tools/azpysdk/devtest.py index 109e88118e15..31ec05c569c3 100644 --- a/eng/tools/azure-sdk-tools/azpysdk/devtest.py +++ b/eng/tools/azure-sdk-tools/azpysdk/devtest.py @@ -145,6 +145,11 @@ def register( nargs=argparse.REMAINDER, help="Additional arguments forwarded to pytest.", ) + p.add_argument( + "--mark_arg", + dest="mark_arg", + help='Optional pytest marker expression passed as -m "" (e.g. "cosmosEmulator").', + ) def before_pytest( self, executable: str, package_dir: str, package_name: str, staging_directory: str, args: argparse.Namespace diff --git a/eng/tools/azure-sdk-tools/azpysdk/install_and_test.py b/eng/tools/azure-sdk-tools/azpysdk/install_and_test.py index 16b8769688a2..b061a9d237c8 100644 --- a/eng/tools/azure-sdk-tools/azpysdk/install_and_test.py +++ b/eng/tools/azure-sdk-tools/azpysdk/install_and_test.py @@ -222,7 +222,10 @@ def _build_pytest_args(self, package_dir: str, args: argparse.Namespace) -> List f"--log-cli-level={log_level}", ] - pytest_args = [*default_args, *self.additional_pytest_args] + pytest_args = [*default_args] + if getattr(args, "mark_arg", None): + pytest_args.extend(["-m", args.mark_arg]) + pytest_args.extend(self.additional_pytest_args) if getattr(args, "pytest_args", None): pytest_args.extend(args.pytest_args) diff --git a/eng/tools/azure-sdk-tools/azpysdk/optional.py b/eng/tools/azure-sdk-tools/azpysdk/optional.py index df951b6a7542..307012666e5c 100644 --- a/eng/tools/azure-sdk-tools/azpysdk/optional.py +++ b/eng/tools/azure-sdk-tools/azpysdk/optional.py @@ -46,6 +46,11 @@ def register( help="The target environment. If not provided, all optional environments will be run.", required=False, ) + p.add_argument( + "--mark_arg", + dest="mark_arg", + help='Optional pytest marker expression passed as -m "" (e.g. 
"cosmosEmulator").', + ) def run(self, args: argparse.Namespace) -> int: """Run the optional check command.""" @@ -173,6 +178,8 @@ def prepare_and_test_optional( "--ignore=samples", f"--log-cli-level={log_level}", ] + if getattr(args, "mark_arg", None): + pytest_args.extend(["-m", args.mark_arg]) pytest_args.extend(config.get("additional_pytest_args", [])) logger.info(f"Invoking tests for package {package_name} and optional environment {env_name}") diff --git a/eng/tools/azure-sdk-tools/azpysdk/sdist.py b/eng/tools/azure-sdk-tools/azpysdk/sdist.py index 8f846db66311..af092a29e10f 100644 --- a/eng/tools/azure-sdk-tools/azpysdk/sdist.py +++ b/eng/tools/azure-sdk-tools/azpysdk/sdist.py @@ -26,3 +26,8 @@ def register( nargs=argparse.REMAINDER, help="Additional arguments forwarded to pytest.", ) + parser.add_argument( + "--mark_arg", + dest="mark_arg", + help='Optional pytest marker expression passed as -m "" (e.g. "cosmosEmulator").', + ) diff --git a/eng/tools/azure-sdk-tools/azpysdk/whl.py b/eng/tools/azure-sdk-tools/azpysdk/whl.py index d710067a0657..f01f6ea17bdd 100644 --- a/eng/tools/azure-sdk-tools/azpysdk/whl.py +++ b/eng/tools/azure-sdk-tools/azpysdk/whl.py @@ -27,3 +27,8 @@ def register( nargs=argparse.REMAINDER, help="Additional arguments forwarded to pytest.", ) + parser.add_argument( + "--mark_arg", + dest="mark_arg", + help='Optional pytest marker expression passed as -m "" (e.g. "cosmosEmulator").', + ) diff --git a/eng/tools/azure-sdk-tools/azpysdk/whl_no_aio.py b/eng/tools/azure-sdk-tools/azpysdk/whl_no_aio.py index 5ba732fd11e0..ba7bb0f433d5 100644 --- a/eng/tools/azure-sdk-tools/azpysdk/whl_no_aio.py +++ b/eng/tools/azure-sdk-tools/azpysdk/whl_no_aio.py @@ -27,6 +27,11 @@ def register( nargs=argparse.REMAINDER, help="Additional arguments forwarded to pytest.", ) + parser.add_argument( + "--mark_arg", + dest="mark_arg", + help='Optional pytest marker expression passed as -m "" (e.g. "cosmosEmulator").', + ) def before_pytest( self, executable: str, package_dir: str, package_name: str, staging_directory: str, args: argparse.Namespace From bec9ad88469eb4e55f6ca2df5427172be218d6b4 Mon Sep 17 00:00:00 2001 From: "Scott Beddall (from Dev Box)" Date: Wed, 4 Feb 2026 15:47:28 -0800 Subject: [PATCH 66/76] save the logging of the executing command --- eng/scripts/dispatch_checks.py | 1 + 1 file changed, 1 insertion(+) diff --git a/eng/scripts/dispatch_checks.py b/eng/scripts/dispatch_checks.py index f03ac1745609..3dfc7fbfc323 100644 --- a/eng/scripts/dispatch_checks.py +++ b/eng/scripts/dispatch_checks.py @@ -112,6 +112,7 @@ async def run_check( if in_ci(): env["PROXY_ASSETS_FOLDER"] = os.path.join(root_dir, ".assets_distributed", str(proxy_port)) try: + logger.info(*cmd) proc = await asyncio.create_subprocess_exec( *cmd, cwd=package, From 4fd3467ae35ba34befd13d3959dde043b23dc579 Mon Sep 17 00:00:00 2001 From: "Scott Beddall (from Dev Box)" Date: Wed, 4 Feb 2026 16:04:44 -0800 Subject: [PATCH 67/76] fix mindependency! 
--- eng/tools/azure-sdk-tools/azpysdk/mindependency.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/eng/tools/azure-sdk-tools/azpysdk/mindependency.py b/eng/tools/azure-sdk-tools/azpysdk/mindependency.py index 53f587105d44..e41d6fa7e123 100644 --- a/eng/tools/azure-sdk-tools/azpysdk/mindependency.py +++ b/eng/tools/azure-sdk-tools/azpysdk/mindependency.py @@ -31,3 +31,9 @@ def register( nargs=argparse.REMAINDER, help="Additional arguments forwarded to pytest.", ) + parser.add_argument( + "--mark_arg", + dest="mark_arg", + help='Optional pytest marker expression passed as -m "" (e.g. "cosmosEmulator").', + ) + From e1ba28bdb7c50e8f20e69e40505647995a29b92e Mon Sep 17 00:00:00 2001 From: "Scott Beddall (from Dev Box)" Date: Wed, 4 Feb 2026 16:06:45 -0800 Subject: [PATCH 68/76] remove mark argument from analyze, where it doesn't make sense --- eng/pipelines/templates/steps/run_bandit.yml | 1 - eng/pipelines/templates/steps/run_breaking_changes.yml | 1 - 2 files changed, 2 deletions(-) diff --git a/eng/pipelines/templates/steps/run_bandit.yml b/eng/pipelines/templates/steps/run_bandit.yml index 24a605a49846..2086c39fbeea 100644 --- a/eng/pipelines/templates/steps/run_bandit.yml +++ b/eng/pipelines/templates/steps/run_bandit.yml @@ -14,7 +14,6 @@ steps: scriptPath: 'eng/scripts/dispatch_checks.py' arguments: >- "$(TargetingString)" - --mark_arg="${{ parameters.TestMarkArgument }}" --service="${{ parameters.ServiceDirectory }}" --checks="bandit" --disable-compatibility-filter diff --git a/eng/pipelines/templates/steps/run_breaking_changes.yml b/eng/pipelines/templates/steps/run_breaking_changes.yml index 7cb3e496be3d..7154f6da5e56 100644 --- a/eng/pipelines/templates/steps/run_breaking_changes.yml +++ b/eng/pipelines/templates/steps/run_breaking_changes.yml @@ -11,7 +11,6 @@ steps: scriptPath: 'eng/scripts/dispatch_checks.py' arguments: >- "$(TargetingString)" - --mark_arg="${{ parameters.TestMarkArgument }}" --service="${{ parameters.ServiceDirectory }}" --checks="breaking" --disable-compatibility-filter From 99870dad4e1e68002c1cc40fe47bc6de846b1530 Mon Sep 17 00:00:00 2001 From: Scott Beddall Date: Thu, 5 Feb 2026 02:00:19 +0000 Subject: [PATCH 69/76] swapping over to set_checks instead of set_tox_environment. moved that function call. fixed mindependency to pass the pytest arg properly so that cosmos won't hang. 
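For context on the trimmed CHECK_PROXY_PORTS table below: only the install-and-test checks keep dedicated proxy ports. The lookup helper's body is not shown in the hunk, so the .get fallback in this sketch is an assumption, as is the value of DEFAULT_PROXY_PORT:

DEFAULT_PROXY_PORT = 5000  # assumed value; only the name appears in the hunk

CHECK_PROXY_PORTS = {
    "whl": DEFAULT_PROXY_PORT,
    "sdist": 5001,
    "whl_no_aio": 5002,
    "devtest": 5003,
    "optional": 5004,
    "mindependency": 5005,
    "latestdependency": 5006,
}

def get_proxy_port_for_check(check: str) -> int:
    # assumed fallback: checks without recorded tests share the default port
    return CHECK_PROXY_PORTS.get(check, DEFAULT_PROXY_PORT)

assert get_proxy_port_for_check("sdist") == 5001
assert get_proxy_port_for_check("pylint") == DEFAULT_PROXY_PORT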
--- eng/pipelines/templates/steps/build-test.yml | 10 ++---- eng/scripts/dispatch_checks.py | 9 ++---- .../scripts/set_checks.py | 6 ++-- .../azpysdk/dependency_check.py | 3 ++ .../azpysdk/install_and_test.py | 28 +--------------- .../azpysdk/latestdependency.py | 5 +++ .../azure-sdk-tools/azpysdk/mindependency.py | 1 - .../azure-sdk-tools/azpysdk/proxy_ports.py | 32 ++++--------------- .../devtools_testutils/proxy_startup.py | 4 +-- 9 files changed, 25 insertions(+), 73 deletions(-) rename scripts/devops_tasks/set_tox_environment.py => eng/scripts/set_checks.py (96%) diff --git a/eng/pipelines/templates/steps/build-test.yml b/eng/pipelines/templates/steps/build-test.yml index 954f3f9163a9..decf5040e51f 100644 --- a/eng/pipelines/templates/steps/build-test.yml +++ b/eng/pipelines/templates/steps/build-test.yml @@ -49,21 +49,15 @@ steps: $(PIP_EXE) freeze displayName: 'Prep Environment' - # this needs to move under eng/scripts from scripts/devops_tasks/ - # AND be renamed prior to merging any PR with this changeset - task: PythonScript@0 - displayName: 'Set Tox Environment' + displayName: 'Set Checks for Run' inputs: - scriptPath: 'scripts/devops_tasks/set_tox_environment.py' + scriptPath: 'eng/scripts/set_checks.py' arguments: >- --unsupported="$(UnsupportedToxEnvironments)" --override="$(Run.ToxCustomEnvs)" --team-project="$(System.TeamProject)" - - template: /eng/common/testproxy/test-proxy-tool.yml - parameters: - runProxy: false - - ${{ parameters.BeforeTestSteps }} - ${{ if eq('true', parameters.UseFederatedAuth) }}: diff --git a/eng/scripts/dispatch_checks.py b/eng/scripts/dispatch_checks.py index 3dfc7fbfc323..a0f2e5db1d51 100644 --- a/eng/scripts/dispatch_checks.py +++ b/eng/scripts/dispatch_checks.py @@ -5,13 +5,10 @@ import time import signal import shutil -import shlex import subprocess -import urllib.request from dataclasses import dataclass from typing import IO, List, Optional -from azpysdk.proxy_ports import get_proxy_port_for_check from ci_tools.functions import discover_targeted_packages from ci_tools.variables import in_ci from ci_tools.scenario.generation import build_whl_for_req, replace_dev_reqs @@ -112,7 +109,7 @@ async def run_check( if in_ci(): env["PROXY_ASSETS_FOLDER"] = os.path.join(root_dir, ".assets_distributed", str(proxy_port)) try: - logger.info(*cmd) + logger.info(" ".join(cmd)) proc = await asyncio.create_subprocess_exec( *cmd, cwd=package, @@ -241,9 +238,7 @@ async def run_all_checks(packages, checks, max_parallel, wheel_dir, mark_arg: Op for idx, (package, check, proxy_port) in enumerate(scheduled, start=1): tasks.append( - asyncio.create_task( - run_check(semaphore, package, check, base_args, idx, total or 1, proxy_port, mark_arg) - ) + asyncio.create_task(run_check(semaphore, package, check, base_args, idx, total or 1, proxy_port, mark_arg)) ) # Handle Ctrl+C gracefully diff --git a/scripts/devops_tasks/set_tox_environment.py b/eng/scripts/set_checks.py similarity index 96% rename from scripts/devops_tasks/set_tox_environment.py rename to eng/scripts/set_checks.py index e776fd780f3d..8055c9ef732c 100644 --- a/scripts/devops_tasks/set_tox_environment.py +++ b/eng/scripts/set_checks.py @@ -29,6 +29,8 @@ def resolve_devops_variable(var_value: str) -> List[str]: return [] else: return [tox_env.strip() for tox_env in var_value.split(",") if tox_env.strip()] + else: + raise ValueError("Provided variable value is empty or None") def set_devops_value(resolved_set: List[str]) -> None: @@ -65,13 +67,13 @@ def process_ci_skips(glob_string: str, service: str) -> 
None: set_ci_variable(f"Skip.{check[0].upper()}{check[1:]}", "true") output_ci_warning( f"All targeted packages {all_packages} skip the {check} check. Omitting step from build.", - "set_tox_environment.py", + "set_checks.py", ) if __name__ == "__main__": parser = argparse.ArgumentParser( - description="This script is used to resolve a set of arguments (that correspond to devops runtime variables) and determine which tox environments should be run for the current job. " + description="This script is used to resolve a set of arguments (that correspond to devops runtime variables) and determine which checks should be run for the current job. " + "When running against a specific service directory, attempts to find entire analysis steps that can be skipped. EG if pylint is disabled for every package in a given service directory, that " + "step should never actually run." ) diff --git a/eng/tools/azure-sdk-tools/azpysdk/dependency_check.py b/eng/tools/azure-sdk-tools/azpysdk/dependency_check.py index bc070e264435..f5bb6ffb67ef 100644 --- a/eng/tools/azure-sdk-tools/azpysdk/dependency_check.py +++ b/eng/tools/azure-sdk-tools/azpysdk/dependency_check.py @@ -195,6 +195,9 @@ def _build_pytest_args(self, package_dir: str, args: argparse.Namespace) -> List pytest_args = list(default_args) + if getattr(args, "mark_arg", None): + pytest_args.extend(["-m", args.mark_arg]) + if getattr(args, "pytest_args", None): pytest_args.extend(args.pytest_args) diff --git a/eng/tools/azure-sdk-tools/azpysdk/install_and_test.py b/eng/tools/azure-sdk-tools/azpysdk/install_and_test.py index b061a9d237c8..ad4de899d60f 100644 --- a/eng/tools/azure-sdk-tools/azpysdk/install_and_test.py +++ b/eng/tools/azure-sdk-tools/azpysdk/install_and_test.py @@ -66,13 +66,6 @@ def run(self, args: argparse.Namespace) -> int: results.append(install_result) continue - try: - self.before_pytest(executable, package_dir, package_name, staging_directory, args) - except CalledProcessError as exc: - logger.error(f"Pre-pytest hook failed for {package_name}: {exc}") - results.append(exc.returncode or 1) - continue - pytest_args = self._build_pytest_args(package_dir, args) pytest_result = self.run_pytest(executable, staging_directory, package_dir, package_name, pytest_args) if pytest_result != 0: @@ -107,13 +100,7 @@ def check_coverage(self, executable: str, package_dir: str, package_name: str) - return 0 def run_pytest( - self, - executable: str, - staging_directory: str, - package_dir: str, - package_name: str, - pytest_args: List[str], - cwd: Optional[str] = None, + self, executable: str, staging_directory: str, package_dir: str, package_name: str, pytest_args: List[str] ) -> int: pytest_command = ["-m", "pytest", *pytest_args] @@ -147,7 +134,6 @@ def install_all_requirements( self._install_common_requirements(executable, package_dir) if self.should_install_dev_requirements(): self.install_dev_reqs(executable, args, package_dir) - self.after_dependencies_installed(executable, package_dir, staging_directory, args) except CalledProcessError as exc: logger.error(f"Failed to prepare dependencies for {package_name}: {exc}") return exc.returncode or 1 @@ -181,18 +167,6 @@ def get_env_defaults(self) -> Dict[str, str]: def should_install_dev_requirements(self) -> bool: return True - def after_dependencies_installed( - self, executable: str, package_dir: str, staging_directory: str, args: argparse.Namespace - ) -> None: - del executable, package_dir, staging_directory, args - return None - - def before_pytest( - self, executable: str, package_dir: str, 
package_name: str, staging_directory: str, args: argparse.Namespace - ) -> None: - del executable, package_dir, package_name, staging_directory, args - return None - def _install_common_requirements(self, executable: str, package_dir: str) -> None: install_into_venv(executable, PACKAGING_REQUIREMENTS, package_dir) diff --git a/eng/tools/azure-sdk-tools/azpysdk/latestdependency.py b/eng/tools/azure-sdk-tools/azpysdk/latestdependency.py index 15153ba163e1..68a069cf4887 100644 --- a/eng/tools/azure-sdk-tools/azpysdk/latestdependency.py +++ b/eng/tools/azure-sdk-tools/azpysdk/latestdependency.py @@ -26,3 +26,8 @@ def register( nargs=argparse.REMAINDER, help="Additional arguments forwarded to pytest.", ) + parser.add_argument( + "--mark_arg", + dest="mark_arg", + help='Optional pytest marker expression passed as -m "" (e.g. "cosmosEmulator").', + ) diff --git a/eng/tools/azure-sdk-tools/azpysdk/mindependency.py b/eng/tools/azure-sdk-tools/azpysdk/mindependency.py index e41d6fa7e123..95e06c222e38 100644 --- a/eng/tools/azure-sdk-tools/azpysdk/mindependency.py +++ b/eng/tools/azure-sdk-tools/azpysdk/mindependency.py @@ -36,4 +36,3 @@ def register( dest="mark_arg", help='Optional pytest marker expression passed as -m "" (e.g. "cosmosEmulator").', ) - diff --git a/eng/tools/azure-sdk-tools/azpysdk/proxy_ports.py b/eng/tools/azure-sdk-tools/azpysdk/proxy_ports.py index 59f0c9777a01..8a48be560ff1 100644 --- a/eng/tools/azure-sdk-tools/azpysdk/proxy_ports.py +++ b/eng/tools/azure-sdk-tools/azpysdk/proxy_ports.py @@ -18,33 +18,13 @@ # NOTE: `import_all` shares the same configuration as the legacy `depends` # tox environment. All other entries match the tox environment names 1:1. CHECK_PROXY_PORTS: Dict[str, int] = { - "import_all": 5008, - "mypy": 5003, - "next-mypy": 5020, - "pylint": 5002, - "next-pylint": 5002, - "ruff": 5022, - "pyright": 5018, - "next-pyright": 5021, - "verifytypes": 5019, - "apistub": 5014, - "verify_sdist": 5010, - "verify_whl": 5009, "whl": DEFAULT_PROXY_PORT, - "whl_no_aio": 5004, - "sdist": 5005, - "samples": 5016, - "devtest": 5011, - "latestdependency": 5012, - "mindependency": 5013, - "bandit": 5015, - "verify_keywords": 5005, - "generate": DEFAULT_PROXY_PORT, - "breaking": 5017, - "sphinx": 5007, - "next-sphinx": 5023, - "optional": 5018, - "black": DEFAULT_PROXY_PORT, + "sdist": 5001, + "whl_no_aio": 5002, + "devtest": 5003, + "optional": 5004, + "mindependency": 5005, + "latestdependency": 5006, } diff --git a/eng/tools/azure-sdk-tools/devtools_testutils/proxy_startup.py b/eng/tools/azure-sdk-tools/devtools_testutils/proxy_startup.py index bf17b3d26708..9dec587773b5 100644 --- a/eng/tools/azure-sdk-tools/devtools_testutils/proxy_startup.py +++ b/eng/tools/azure-sdk-tools/devtools_testutils/proxy_startup.py @@ -22,10 +22,10 @@ from urllib.parse import urlparse from urllib3.exceptions import SSLError -from ci_tools.variables import in_ci # +from ci_tools.variables import in_ci from .config import PROXY_URL -from .fake_credentials import FAKE_ACCESS_TOKEN, FAKE_ID, SERVICEBUS_FAKE_SAS, SANITIZED +from .fake_credentials import SANITIZED from .helpers import get_http_client, is_live_and_not_recording from .sanitizers import ( add_batch_sanitizers, From 769512ffacf64a1fe3e7586b29429dfc5347118b Mon Sep 17 00:00:00 2001 From: Scott Beddall Date: Thu, 5 Feb 2026 02:27:17 +0000 Subject: [PATCH 70/76] skip env --- eng/pipelines/templates/steps/analyze.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/eng/pipelines/templates/steps/analyze.yml 
b/eng/pipelines/templates/steps/analyze.yml index 21242a0bc0b5..00fa91e96e35 100644 --- a/eng/pipelines/templates/steps/analyze.yml +++ b/eng/pipelines/templates/steps/analyze.yml @@ -29,7 +29,7 @@ steps: displayName: 'Set Tox Environment Skips' condition: succeededOrFailed() inputs: - scriptPath: 'scripts/devops_tasks/set_tox_environment.py' + scriptPath: 'eng/scripts/set_checks.py' arguments: '"$(TargetingString)" --team-project="$(System.TeamProject)" --service="${{ parameters.ServiceDirectory }}"' - ${{ if eq(variables['Build.Reason'], 'PullRequest') }}: From ce57329963a90356241e81c3d8e43351b967ee28 Mon Sep 17 00:00:00 2001 From: Scott Beddall Date: Fri, 6 Feb 2026 18:49:22 +0000 Subject: [PATCH 71/76] finally resolve the wonky import errors that we were seeing. this SHOULD get solid invocation now --- eng/tools/azure-sdk-tools/azpysdk/Check.py | 41 ++++++++++++----- .../azpysdk/dependency_check.py | 41 ++++------------- .../azpysdk/install_and_test.py | 44 +++++-------------- eng/tools/azure-sdk-tools/azpysdk/main.py | 9 ++-- 4 files changed, 55 insertions(+), 80 deletions(-) diff --git a/eng/tools/azure-sdk-tools/azpysdk/Check.py b/eng/tools/azure-sdk-tools/azpysdk/Check.py index cbc122dfca8f..2c87589f91e4 100644 --- a/eng/tools/azure-sdk-tools/azpysdk/Check.py +++ b/eng/tools/azure-sdk-tools/azpysdk/Check.py @@ -265,14 +265,16 @@ def pip_freeze(self, executable: str) -> None: logger.error(e.stdout) logger.error(e.stderr) - def _build_pytest_args(self, package_dir: str, args: argparse.Namespace) -> List[str]: - """ - Builds the pytest arguments used for the given package directory. - - :param package_dir: The package directory to build pytest args for. - :param args: The argparse.Namespace object containing command-line arguments. - :return: A list of pytest arguments. 
- """ + def _build_pytest_args_base( + self, + package_dir: str, + args: argparse.Namespace, + *, + ignore_globs: Optional[List[str]] = None, + extra_args: Optional[List[str]] = None, + test_target: Optional[str] = None, + ) -> List[str]: + """Build common pytest args for a package directory.""" log_level = os.getenv("PYTEST_LOG_LEVEL", "51") junit_path = os.path.join(package_dir, f"test-junit-{args.command}.xml") @@ -284,13 +286,30 @@ def _build_pytest_args(self, package_dir: str, args: argparse.Namespace) -> List "--durations=10", "--ignore=azure", "--ignore=.tox", - "--ignore-glob=.venv*", "--ignore=build", "--ignore=.eggs", "--ignore=samples", f"--log-cli-level={log_level}", ] - additional = args.pytest_args if args.pytest_args else [] + for glob in ignore_globs or [".venv*"]: + default_args.append(f"--ignore-glob={glob}") + + pytest_args = [*default_args] - return [*default_args, *additional, package_dir] + if extra_args: + pytest_args.extend(extra_args) + + if getattr(args, "mark_arg", None): + pytest_args.extend(["-m", args.mark_arg]) + + if getattr(args, "pytest_args", None): + pytest_args.extend(args.pytest_args) + + pytest_args.append(test_target or ".") + + return pytest_args + + def _build_pytest_args(self, package_dir: str, args: argparse.Namespace) -> List[str]: + """Build pytest args for a package directory.""" + return self._build_pytest_args_base(package_dir, args) diff --git a/eng/tools/azure-sdk-tools/azpysdk/dependency_check.py b/eng/tools/azure-sdk-tools/azpysdk/dependency_check.py index f5bb6ffb67ef..9acca357a364 100644 --- a/eng/tools/azure-sdk-tools/azpysdk/dependency_check.py +++ b/eng/tools/azure-sdk-tools/azpysdk/dependency_check.py @@ -110,12 +110,13 @@ def run(self, args: argparse.Namespace) -> int: continue pytest_args = self._build_pytest_args(package_dir, args) - pytest_command = ["-m", "pytest", *pytest_args] + pytest_command = ["pytest", *pytest_args] pytest_result = self.run_venv_command( executable, pytest_command, - cwd=staging_directory, + cwd=package_dir, immediately_dump=True, + append_executable=False, ) if pytest_result.returncode != 0: @@ -173,34 +174,8 @@ def _verify_installed_packages(self, executable: str, package_dir: str, staging_ return True def _build_pytest_args(self, package_dir: str, args: argparse.Namespace) -> List[str]: - log_level = os.getenv("PYTEST_LOG_LEVEL", "51") - junit_path = os.path.join(package_dir, f"test-junit-{args.command}.xml") - - default_args = [ - f"{package_dir}", - "-rsfE", - f"--junitxml={junit_path}", - "--verbose", - "--cov-branch", - "--durations=10", - "--ignore=azure", - "--ignore=.tox", - "--ignore-glob=.venv*", - "--ignore=build", - "--ignore=.eggs", - "--ignore=samples", - f"--log-cli-level={log_level}", - "--no-cov", - ] - - pytest_args = list(default_args) - - if getattr(args, "mark_arg", None): - pytest_args.extend(["-m", args.mark_arg]) - - if getattr(args, "pytest_args", None): - pytest_args.extend(args.pytest_args) - - pytest_args.append(package_dir) - - return pytest_args + return self._build_pytest_args_base( + package_dir, + args, + extra_args=["--no-cov"], + ) diff --git a/eng/tools/azure-sdk-tools/azpysdk/install_and_test.py b/eng/tools/azure-sdk-tools/azpysdk/install_and_test.py index ad4de899d60f..0d27ff1790a0 100644 --- a/eng/tools/azure-sdk-tools/azpysdk/install_and_test.py +++ b/eng/tools/azure-sdk-tools/azpysdk/install_and_test.py @@ -102,17 +102,21 @@ def check_coverage(self, executable: str, package_dir: str, package_name: str) - def run_pytest( self, executable: str, staging_directory: 
str, package_dir: str, package_name: str, pytest_args: List[str]
     ) -> int:
-        pytest_command = ["-m", "pytest", *pytest_args]
+        pytest_command = ["pytest", *pytest_args]
 
         environment = os.environ.copy()
         environment.update({"PYTHONPYCACHEPREFIX": staging_directory})
 
+        logger.info(f"Running pytest for {package_name} with command: {pytest_command}")
+        logger.debug(f"with environment vars: {environment}")
+
         pytest_result = self.run_venv_command(
             executable,
             pytest_command,
             cwd=package_dir,
             immediately_dump=True,
             additional_environment_settings=environment,
+            append_executable=False,
         )
         if pytest_result.returncode != 0:
             if pytest_result.returncode == 5 and is_error_code_5_allowed(package_dir, package_name):
@@ -176,34 +180,10 @@ def _install_common_requirements(self, executable: str, package_dir: str) -> Non
             logger.warning(f"Test tools requirements file not found at {TEST_TOOLS_REQUIREMENTS}.")
 
     def _build_pytest_args(self, package_dir: str, args: argparse.Namespace) -> List[str]:
-        log_level = os.getenv("PYTEST_LOG_LEVEL", "51")
-        junit_path = os.path.join(package_dir, f"test-junit-{args.command}.xml")
-
-        default_args = [
-            f"{package_dir}",
-            "-rsfE",
-            f"--junitxml={junit_path}",
-            "--verbose",
-            "--cov-branch",
-            "--durations=10",
-            "--ignore=azure",
-            "--ignore=.tox",
-            "--ignore-glob=**/.venv*",
-            "--ignore-glob=**/.venv*/**",
-            "--ignore=build",
-            "--ignore=.eggs",
-            "--ignore=samples",
-            f"--log-cli-level={log_level}",
-        ]
-
-        pytest_args = [*default_args]
-        if getattr(args, "mark_arg", None):
-            pytest_args.extend(["-m", args.mark_arg])
-        pytest_args.extend(self.additional_pytest_args)
-
-        if getattr(args, "pytest_args", None):
-            pytest_args.extend(args.pytest_args)
-
-        pytest_args.append(package_dir)
-
-        return pytest_args
+        return self._build_pytest_args_base(
+            package_dir,
+            args,
+            ignore_globs=["**/.venv*", "**/.venv*/**"],
+            extra_args=self.additional_pytest_args,
+            test_target=package_dir,
+        )
diff --git a/eng/tools/azure-sdk-tools/azpysdk/main.py b/eng/tools/azure-sdk-tools/azpysdk/main.py
index af8702e8931e..f3617c724b51 100644
--- a/eng/tools/azure-sdk-tools/azpysdk/main.py
+++ b/eng/tools/azure-sdk-tools/azpysdk/main.py
@@ -10,6 +10,7 @@
 import argparse
 import shutil
 import os
+import sys
 from typing import Sequence, Optional
 
 from .import_all import import_all
@@ -138,14 +139,14 @@ def main(argv: Optional[Sequence[str]] = None) -> int:
 
     try:
         result = args.func(args)
-        exit_code = int(result or 0)
-        os._exit(exit_code)
+        print(f"{args.command} check completed with exit code {result}")
+        return int(result or 0)
     except KeyboardInterrupt:
         logger.error("Interrupted by user")
-        os._exit(130)
+        return 130
     except Exception as exc:  # pragma: no cover - simple top-level error handling
         logger.error(f"Error: {exc}")
-        os._exit(2)
+        return 2
 
 
 if __name__ == "__main__":

From db3a7690409cd8e6b0852fb20bcacdc922a783df Mon Sep 17 00:00:00 2001
From: Scott Beddall
Date: Fri, 6 Feb 2026 20:14:09 +0000
Subject: [PATCH 72/76] apply the same fix to apply_depend_packages so we can
 simply take ours when merging main

---
 .../ci_tools/scenario/dependency_resolution.py | 9 +++++++++
 1 file changed, 9 insertions(+)

diff --git a/eng/tools/azure-sdk-tools/ci_tools/scenario/dependency_resolution.py b/eng/tools/azure-sdk-tools/ci_tools/scenario/dependency_resolution.py
index 90e7bab9bc52..17338dabc358 100644
--- a/eng/tools/azure-sdk-tools/ci_tools/scenario/dependency_resolution.py
+++ b/eng/tools/azure-sdk-tools/ci_tools/scenario/dependency_resolution.py
@@ -234,10 +234,19 @@ def process_requirement(req: 
str, dependency_type: str, orig_pkg_name: str) -> s ) return "" + # if the specifier includes preview versions, then we can resolve preview versions + # otherwise, we should filter them out + allows_prereleases = spec is not None and spec.prereleases is True + client = PyPIClient() versions = [str(v) for v in client.get_ordered_versions(pkg_name, True)] logger.info("Versions available on PyPI for %s: %s", pkg_name, versions) + # prepass filter before choosing a latest or minimum, eliminate prerelease versions if they are not allowed based on the specifier + if not allows_prereleases: + versions = [v for v in versions if not Version(v).is_prerelease] + logger.info(f"Filtered out pre-release versions for {pkg_name} based on specifier. Remaining versions: {versions}") + versions = process_bounded_versions(orig_pkg_name, pkg_name, versions) if dependency_type == "Latest": From 87327b53039b2849edb6847cb2462730366c1b91 Mon Sep 17 00:00:00 2001 From: Scott Beddall Date: Fri, 6 Feb 2026 21:48:14 +0000 Subject: [PATCH 73/76] save the progress on this thing. --- eng/pipelines/templates/steps/analyze.yml | 2 +- eng/scripts/dispatch_checks.py | 69 ++++++++++++++++--- eng/scripts/set_checks.py | 2 +- .../scenario/dependency_resolution.py | 4 +- .../devtools_testutils/config.py | 2 + .../devtools_testutils/proxy_testcase.py | 1 + 6 files changed, 68 insertions(+), 12 deletions(-) diff --git a/eng/pipelines/templates/steps/analyze.yml b/eng/pipelines/templates/steps/analyze.yml index 00fa91e96e35..fef2d8d1f0cc 100644 --- a/eng/pipelines/templates/steps/analyze.yml +++ b/eng/pipelines/templates/steps/analyze.yml @@ -26,7 +26,7 @@ steps: DevFeedName: ${{ parameters.DevFeedName }} - task: PythonScript@0 - displayName: 'Set Tox Environment Skips' + displayName: 'Set Checks Environment Skips' condition: succeededOrFailed() inputs: scriptPath: 'eng/scripts/set_checks.py' diff --git a/eng/scripts/dispatch_checks.py b/eng/scripts/dispatch_checks.py index a0f2e5db1d51..1b0d036dab05 100644 --- a/eng/scripts/dispatch_checks.py +++ b/eng/scripts/dispatch_checks.py @@ -6,6 +6,7 @@ import signal import shutil import subprocess +import re from dataclasses import dataclass from typing import IO, List, Optional @@ -15,6 +16,7 @@ from ci_tools.logging import configure_logging, logger from ci_tools.environment_exclusions import is_check_enabled, CHECK_DEFAULTS from devtools_testutils.proxy_startup import prepare_local_tool +from packaging.requirements import Requirement root_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), "..", "..")) ISOLATE_DIRS_TO_CLEAN: List[str] = [] @@ -68,6 +70,45 @@ def _checks_require_recording_restore(checks: List[str]) -> bool: return any(check in INSTALL_AND_TEST_CHECKS for check in checks) +def _compare_req_to_injected_reqs(parsed_req, injected_packages: List[str]) -> bool: + if parsed_req is None: + return False + return any(parsed_req.name in req for req in injected_packages) + + +def _inject_custom_reqs(req_file: str, injected_packages: str, package_dir: str) -> None: + req_lines = [] + injected_list = [p for p in re.split(r"[\s,]", injected_packages) if p] + + if not injected_list: + return + + logger.info(f"Adding custom packages to requirements for {package_dir}") + with open(req_file, "r") as handle: + for line in handle: + logger.info(f"Attempting to parse {line}") + try: + parsed_req = Requirement(line.strip()) + except Exception as exc: + logger.error(exc) + parsed_req = None + req_lines.append((line, parsed_req)) + + if req_lines: + all_adjustments = injected_list + [ 
+            line_tuple[0].strip()
+            for line_tuple in req_lines
+            if line_tuple[0].strip() and not _compare_req_to_injected_reqs(line_tuple[1], injected_list)
+        ]
+    else:
+        all_adjustments = injected_list
+
+    logger.info(f"Generated custom requirements: {all_adjustments}")
+
+    with open(req_file, "w") as handle:
+        handle.write("\n".join(all_adjustments))
+
+
 async def run_check(
     semaphore: asyncio.Semaphore,
     package: str,
@@ -183,7 +224,7 @@ def summarize(results: List[CheckResult]) -> int:
     return worst
 
 
-async def run_all_checks(packages, checks, max_parallel, wheel_dir, mark_arg: Optional[str]):
+async def run_all_checks(packages, checks, max_parallel, wheel_dir, mark_arg: Optional[str], injected_packages: str):
     """Run all checks for all packages concurrently and return the worst exit code.
 
     :param packages: Iterable of package paths to run checks against.
@@ -214,17 +255,20 @@ async def run_all_checks(packages, checks, max_parallel, wheel_dir, mark_arg: Op
         logger.info("Replacing relative requirements in eng/dependency_tools.txt with prebuilt wheels.")
         replace_dev_reqs(dependency_tools_path, root_dir, wheel_dir)
 
-        for pkg in packages:
-            destination_dev_req = os.path.join(pkg, "dev_requirements.txt")
+    for pkg in packages:
+        destination_dev_req = os.path.join(pkg, "dev_requirements.txt")
 
-            logger.info(f"Replacing dev requirements w/ path {destination_dev_req}")
-            if not os.path.exists(destination_dev_req):
-                logger.info("No dev_requirements present.")
-                with open(destination_dev_req, "w+") as file:
-                    file.write("\n")
+        logger.info(f"Replacing dev requirements w/ path {destination_dev_req}")
+        if not os.path.exists(destination_dev_req):
+            logger.info("No dev_requirements present.")
+            with open(destination_dev_req, "w+") as file:
+                file.write("\n")
 
+        if in_ci():
             replace_dev_reqs(destination_dev_req, pkg, wheel_dir)
 
+        _inject_custom_reqs(destination_dev_req, injected_packages, pkg)
+
     next_proxy_port = BASE_PROXY_PORT
     for package, check in combos:
         if not is_check_enabled(package, check, CHECK_DEFAULTS.get(check, True)):
@@ -467,7 +511,14 @@ def handler(signum, frame):
         if in_ci():
             logger.info(f"Ensuring {len(checks)} test proxies are running for requested checks...")
         exit_code = asyncio.run(
-            run_all_checks(targeted_packages, checks, args.max_parallel, temp_wheel_dir, args.mark_arg)
+            run_all_checks(
+                targeted_packages,
+                checks,
+                args.max_parallel,
+                temp_wheel_dir,
+                args.mark_arg,
+                args.injected_packages,
+            )
         )
     except KeyboardInterrupt:
         logger.error("Aborted by user.")
diff --git a/eng/scripts/set_checks.py b/eng/scripts/set_checks.py
index 8055c9ef732c..54369d3535c9 100644
--- a/eng/scripts/set_checks.py
+++ b/eng/scripts/set_checks.py
@@ -30,7 +30,7 @@ def resolve_devops_variable(var_value: str) -> List[str]:
         else:
             return [tox_env.strip() for tox_env in var_value.split(",") if tox_env.strip()]
     else:
-        raise ValueError("Provided variable value is empty or None")
+        return []
 
 
 def set_devops_value(resolved_set: List[str]) -> None:
diff --git a/eng/tools/azure-sdk-tools/ci_tools/scenario/dependency_resolution.py b/eng/tools/azure-sdk-tools/ci_tools/scenario/dependency_resolution.py
index 17338dabc358..5c9859f795f5 100644
--- a/eng/tools/azure-sdk-tools/ci_tools/scenario/dependency_resolution.py
+++ b/eng/tools/azure-sdk-tools/ci_tools/scenario/dependency_resolution.py
@@ -245,7 +245,9 @@ def process_requirement(req: str, dependency_type: str, orig_pkg_name: str) -> s
     # prepass filter before choosing a latest or minimum, eliminate prerelease versions if they are not allowed based on the specifier
     if not 
allows_prereleases: versions = [v for v in versions if not Version(v).is_prerelease] - logger.info(f"Filtered out pre-release versions for {pkg_name} based on specifier. Remaining versions: {versions}") + logger.info( + f"Filtered out pre-release versions for {pkg_name} based on specifier. Remaining versions: {versions}" + ) versions = process_bounded_versions(orig_pkg_name, pkg_name, versions) diff --git a/eng/tools/azure-sdk-tools/devtools_testutils/config.py b/eng/tools/azure-sdk-tools/devtools_testutils/config.py index 9bc15d89cae0..605844dcb152 100644 --- a/eng/tools/azure-sdk-tools/devtools_testutils/config.py +++ b/eng/tools/azure-sdk-tools/devtools_testutils/config.py @@ -14,9 +14,11 @@ ENV_LIVE_TEST = "AZURE_TEST_RUN_LIVE" TEST_SETTING_FILENAME = "testsettings_local.cfg" + def PROXY_URL(): return os.getenv("PROXY_URL", "http://localhost:5000").rstrip("/") + class TestConfig(object): # pylint: disable=too-few-public-methods def __init__(self, parent_parsers=None, config_file=None): parent_parsers = parent_parsers or [] diff --git a/eng/tools/azure-sdk-tools/devtools_testutils/proxy_testcase.py b/eng/tools/azure-sdk-tools/devtools_testutils/proxy_testcase.py index 9a6fbf51c72f..a4e109d6f685 100644 --- a/eng/tools/azure-sdk-tools/devtools_testutils/proxy_testcase.py +++ b/eng/tools/azure-sdk-tools/devtools_testutils/proxy_testcase.py @@ -51,6 +51,7 @@ PLAYBACK_START_URL = "{}/playback/start".format(PROXY_URL()) PLAYBACK_STOP_URL = "{}/playback/stop".format(PROXY_URL()) + class RecordedTransport(str, Enum): """Enum for specifying which transports to record in the test proxy.""" From 7fc171e1c481d4316ba205d7c6dc780c5cf93c54 Mon Sep 17 00:00:00 2001 From: Scott Beddall Date: Sat, 7 Feb 2026 00:00:48 +0000 Subject: [PATCH 74/76] lots of doc updates as well. --- CONTRIBUTING.md | 153 +++++++---------------------- doc/dev/dev_setup.md | 4 +- doc/dev/engineering_assumptions.md | 2 +- doc/dev/pylint_checking.md | 15 ++- doc/dev/sample_guide.md | 10 +- doc/dev/static_type_checking.md | 24 +++-- doc/dev/tests.md | 35 ++++--- doc/eng_sys_checks.md | 86 ++++++++-------- doc/repo_health_status.md | 2 +- doc/tool_usage_guide.md | 2 +- 10 files changed, 122 insertions(+), 211 deletions(-) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index e17a4a5472ee..8fade23c59f6 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -11,9 +11,7 @@ If you want to contribute to a file that is generated (header contains `Code gen We utilize a variety of tools to ensure smooth development, testing, and code quality for the Azure Python SDK. Below is a list of key tools and their purpose in the workflow: -- Tox: [Tox](https://tox.wiki/en/latest/) is our primary tool for managing test environments. It allows us to distribute tests to virtual environments, install dependencies, and maintain consistency between local and CI builds. Tox is configured to handle various testing scenarios, including linting, type checks, and running unit tests. - -- Virtualenv: [Virtualenv](https://virtualenv.pypa.io/en/latest/) is leveraged by Tox to create isolated environments for each test suite, ensuring consistent dependencies and reducing conflicts. +- azpysdk: The `azpysdk` CLI is our primary tool for running checks locally and in CI. It is an entrypoint provided by the `eng/tools/azure-sdk-tools` package and abstracts all checks (linting, type checking, tests, doc generation, etc.) behind a single command. See the [Tool Usage Guide](https://github.com/Azure/azure-sdk-for-python/blob/main/doc/tool_usage_guide.md) for full details. 
- UV: [UV](https://docs.astral.sh/uv/) is a fast package manager that can manage Python versions, run and install Python packages, and be used instead of pip, virtualenv, and more. @@ -31,140 +29,65 @@ We utilize a variety of tools to ensure smooth development, testing, and code qu ## Building and Testing -The Azure SDK team's Python CI leverages the tool `tox` to distribute tests to virtual environments, handle test dependency installation, and coordinate tooling reporting during PR/CI builds. This means that a dev working locally can reproduce _exactly_ what the build machine is doing. - -[A Brief Overview of Tox](https://tox.wiki/en/latest/) - -#### A Monorepo and Tox in Harmony - -Traditionally, the `tox.ini` file for a package sits _alongside the setup.py_ in source code. The `azure-sdk-for-python` necessarily does not adhere to this policy. There are over one-hundred packages contained here-in. That's a lot of `tox.ini` files to maintain! - -Instead, the CI system leverages the `--root` argument which is new to `tox4`. The `--root` argument allows `tox` to act as if the `tox.ini` is located in whatever directory you specify! - -#### Tox Environments - -A given `tox.ini` works on the concept of `test environments`. A given test environment is a combination of: - -1. An identifier (or identifiers) -2. A targeted Python version - 1. `tox` will default to base python executing the `tox` command if no Python environment is specified -3. (optionally) an OS platform - -Internally `tox` leverages `virtualenv` to create each test environment's virtual environment. +The Azure SDK team's Python CI leverages the `azpysdk` CLI tool to run checks, tests, and linters during PR/CI builds. This means that a dev working locally can reproduce _exactly_ what the build machine is doing. -This means that once the `tox` workflow is in place, all tests will be executed _within a virtual environment._ +The `azpysdk` entrypoint is provided by the `eng/tools/azure-sdk-tools` package. For full setup instructions and the list of available checks, see the [Tool Usage Guide](https://github.com/Azure/azure-sdk-for-python/blob/main/doc/tool_usage_guide.md). -You can use the command `tox list` to list all the environments provided by a `tox.ini` file. You can either use that command in the -same directory as the file itself, or use the `--conf` argument to specify the path to it directly. +### Quick Setup - -Sample output of `tox list`: +From the root of your target package: ``` -sdk-for-python/eng/tox> tox list -default environments: -whl -> Builds a wheel and runs tests -sdist -> Builds a source distribution and runs tests - -additional environments: -pylint -> Lints a package with a pinned version of pylint -next-pylint -> Lints a package with pylint (version 2.15.8) -mypy -> Typechecks a package with mypy (version 1.9.0) -next-mypy -> Typechecks a package with the latest version of mypy -pyright -> Typechecks a package with pyright (version 1.1.287) -next-pyright -> Typechecks a package with the latest version of static type-checker pyright -verifytypes -> Verifies the "type completeness" of a package with pyright -whl_no_aio -> Builds a wheel without aio and runs tests -develop -> Tests a package -sphinx -> Builds a package's documentation with sphinx -depends -> Ensures all modules in a target package can be successfully imported -verifywhl -> Verify directories included in whl and contents in manifest file -verifysdist -> Verify directories included in sdist and contents in manifest file. 
Also ensures that py.typed configuration is correct within the setup.py -devtest -> Tests a package against dependencies installed from a dev index -latestdependency -> Tests a package against the released, upper-bound versions of its azure dependencies -mindependency -> Tests a package against the released, lower-bound versions of its azure dependencies -apistub -> Generate an api stub of a package ( for https://apiview.dev ) -bandit -> Runs bandit, a tool to find common security issues, against a package -samples -> Runs a package's samples -breaking -> Runs the breaking changes checker against a package +pip install -r dev_requirements.txt ``` -### Example Usage of the common Azure SDK For Python `tox.ini` - -Basic usage of `tox` within this monorepo is: - -1. `pip install "tox<5"` -2. Run `tox run -e ENV_NAME -c path/to/tox.ini --root path/to/python_package` - * **Note**: You can use environment variables to provide defaults for tox config values - * With `TOX_CONFIG_FILE` set to the absolute path of `tox.ini`, you can avoid needing `-c path/to/tox.ini` in your tox invocations - * With `TOX_ROOT_DIR` set to the absolute path to your python package, you can avoid needing `--root path/to/python_package` - -The common `tox.ini` location is `eng/tox/tox.ini` within the repository. - -If at any time you want to blow away the tox created virtual environments and start over, simply append `-r` to any tox invocation! - -#### Example `azure-core` mypy - -1. Run `tox run -e mypy -c ./eng/tox/tox.ini --root sdk/core/azure-core` +This installs `azure-sdk-tools` (which provides `azpysdk`) along with the package's dev dependencies. -#### Example `azure-storage-blob` tests +### Available Checks -2. Execute `tox run -c ./eng/tox/tox.ini --root sdk/storage/azure-storage-blob` - -Note that we didn't provide an `environment` argument for this example. Reason here is that the _default_ environment selected by our common `tox.ini` file is one that runs `pytest`. - -#### `whl` environment -Used for test execution across the spectrum of all the platforms we want to support. Maintained at a `platform specific` level just in case we run into platform-specific bugs. - -* Installs the wheel, runs tests using the wheel +You can discover all available checks by running `azpysdk --help`. Some common checks: ``` -\> tox run -e whl -c --root - +azpysdk pylint . # Lint with pylint +azpysdk mypy . # Type check with mypy +azpysdk pyright . # Type check with pyright +azpysdk verifytypes . # Verify type completeness +azpysdk sphinx . # Build documentation +azpysdk bandit . # Security analysis +azpysdk black . # Code formatting +azpysdk verifywhl . # Verify wheel contents +azpysdk verifysdist . # Verify sdist contents +azpysdk import_all . # Verify all imports resolve +azpysdk apistub . # Generate API stub +azpysdk samples . # Run samples +azpysdk breaking . # Check for breaking changes +azpysdk devtest . # Test against dev feed dependencies ``` -#### `sdist` environment -Used for the local dev loop. +### Running from the repo root -* Installs package in editable mode -* Runs tests using the editable mode installation, not the wheel +`azpysdk` also supports globbing and comma-separated package names when invoked from the repo root: ``` - -\> tox run -e sdist -c --root - +azure-sdk-for-python> azpysdk import_all azure-storage* +azure-sdk-for-python> azpysdk pylint azure-storage-blob,azure-core ``` -#### `pylint` environment -Pylint install and run. 
-
-```
-\> tox run -e pylint -c --root
-```
-
-#### `mypy` environment
-Mypy install and run.
+### Isolated environments
 
-```
-\> tox run -e mypy -c --root
-```
+To run a check in a completely fresh virtual environment, add `--isolate`:
+
+```
+azpysdk pylint . --isolate
+```
 
-#### `sphinx` environment
-Generate sphinx doc for this package.
-
-```
-\> tox run -e sphinx -c --root
-```
 
 ### Custom Pytest Arguments
 
-`tox` supports custom arguments, and the defined pytest environments within the common `tox.ini` also allow these. Essentially, separate the arguments you want passed to `pytest` by a `--` in your tox invocation.
-
-[Tox Documentation on Positional Arguments](https://tox.wiki/en/latest/config.html#substitutions-for-positional-arguments-in-commands)
+When running test-related checks, you can pass additional arguments to `pytest` after `--`:
 
-**Example: Invoke tox, breaking into the debugger on failure**
-`tox run -e whl -c --root -- --pdb`
+```
+azpysdk devtest . -- --pdb
+```
 
 ### Performance Testing
 
@@ -175,7 +98,7 @@ SDK performance testing is supported via the custom `perfstress` framework. For
 We maintain an [additional document](https://github.com/Azure/azure-sdk-for-python/blob/main/doc/eng_sys_checks.md) that has a ton of detail as to what is actually _happening_ in these executions.
 
 ### Dev Feed
-Daily dev build version of Azure sdk packages for python are available and are uploaded to Azure devops feed daily. We have also created a tox environment to test a package against dev built version of dependent packages. Below is the link to Azure devops feed.
+Dev build versions of Azure SDK packages for Python are uploaded daily to an Azure DevOps feed, linked below.
 [`https://dev.azure.com/azure-sdk/public/_packaging?_a=feed&feed=azure-sdk-for-python`](https://dev.azure.com/azure-sdk/public/_packaging?_a=feed&feed=azure-sdk-for-python)
 
 ##### To install latest dev build version of a package
@@ -191,13 +114,13 @@ pip install azure-appconfiguration==1.0.0b6.dev20191205001 --extra-index-url htt
 To test a package being developed against latest dev build version of dependent packages:
 a. cd to package root folder
-b. run tox environment devtest
+b. run `azpysdk devtest`
 
 ```
-\> tox run -e devtest -c --root
+azpysdk devtest .
 ```
 
-This tox test( devtest) will fail if installed dependent packages are not dev build version.
+This check will fail if installed dependent packages are not dev build versions.
 
 ## Samples
 
diff --git a/doc/dev/dev_setup.md b/doc/dev/dev_setup.md
index b88d97cb4f0f..1306904e5387 100644
--- a/doc/dev/dev_setup.md
+++ b/doc/dev/dev_setup.md
@@ -33,11 +33,10 @@ or execute the various commands available in the toolbox.
 
 4. Setup your development environment
 
-   Install the development requirements for a specific library (located in the `dev_requirements.txt` file at the root of the library), [Tox][tox] and an editable install of your library. For example, to install requirements for `azure-ai-formrecognizer`:
+   Install the development requirements for a specific library (located in the `dev_requirements.txt` file at the root of the library) and an editable install of your library. This will also install `azure-sdk-tools` which provides the `azpysdk` CLI for running checks. 
For example, to install requirements for `azure-ai-formrecognizer`: ``` azure-sdk-for-python> cd sdk/formrecognizer/azure-ai-formrecognizer azure-sdk-for-python/sdk/formrecognizer/azure-ai-formrecognizer> pip install -r dev_requirements.txt - azure-sdk-for-python/sdk/formrecognizer/azure-ai-formrecognizer> pip install "tox<5" azure-sdk-for-python/sdk/formrecognizer/azure-ai-formrecognizer> pip install -e . ``` @@ -54,5 +53,4 @@ After following the steps above, you'll be able to run recorded SDK tests with ` [python_website]: https://www.python.org/downloads/ [python_312]: https://apps.microsoft.com/detail/9ncvdn91xzqp [tests]: https://github.com/Azure/azure-sdk-for-python/blob/main/doc/dev/tests.md -[tox]: https://tox.wiki/en/latest/ [virtual_environment]: https://docs.python.org/3/tutorial/venv.html diff --git a/doc/dev/engineering_assumptions.md b/doc/dev/engineering_assumptions.md index 7b910c9111c5..5debb67a2ae3 100644 --- a/doc/dev/engineering_assumptions.md +++ b/doc/dev/engineering_assumptions.md @@ -13,7 +13,7 @@ universal=1 Build CI for `azure-sdk-for-python` essentially builds and tests packages in one of two methodologies. ### Individual Packages -1. Leverage `tox` to create wheel, install, and execute tests against newly installed wheel +1. Leverage `azpysdk` to create wheel, install, and execute tests against newly installed wheel 2. Tests each package in isolation (outside of dev_requirements.txt dependencies + necessary `pylint` and `mypy`) ### Global Method diff --git a/doc/dev/pylint_checking.md b/doc/dev/pylint_checking.md index 858a53128a9f..7bee34e36590 100644 --- a/doc/dev/pylint_checking.md +++ b/doc/dev/pylint_checking.md @@ -22,11 +22,11 @@ In the Azure SDK for Python repository, in addition to the standard pylint libra ## How to run Pylint? -One way to run pylint is to run at the package level with tox: +The recommended way to run pylint is with `azpysdk` at the package level: - .../azure-sdk-for-python/sdk/eventgrid/azure-eventgrid>tox run -e pylint -c ../../../eng/tox/tox.ini --root . + .../azure-sdk-for-python/sdk/eventgrid/azure-eventgrid> azpysdk pylint . -If you don't want to use tox, you can also install and run pylint on its own: +If you don't want to use `azpysdk`, you can also install and run pylint on its own: - If taking this approach, in order to run with the pylintrc formatting and the custom pylint checkers you must also install the custom checkers and `SET` the pylintrc path. @@ -36,8 +36,7 @@ If you don't want to use tox, you can also install and run pylint on its own: .../azure-sdk-for-python>SET PYLINTRC="./pylintrc" .../azure-sdk-for-python>pylint ./sdk/eventgrid/azure-eventgrid - Note that you may see different errors if running a different [version of pylint or azure-pylint-guidelines-checker](https://github.com/Azure/azure-sdk-for-python/blob/fdf7c49ea760b1e1698ebbbac48794e8382d8de5/eng/tox/tox.ini#L90) than the one in CI. - + Note that you may see different errors if running a different version of [pylint](https://github.com/Azure/azure-sdk-for-python/blob/main/eng/tools/azure-sdk-tools/azpysdk/pylint.py#L17) or [azure-pylint-guidelines-checker](https://github.com/Azure/azure-sdk-for-python/blob/main/eng/tools/azure-sdk-tools/azpysdk/pylint.py#L61) than the one in CI. # Ignoring Pylint Checkers @@ -58,12 +57,12 @@ In addition to being a part of the CI, the custom pylint checkers are also integ There is now a new step on the CI pipeline called `Run Pylint Next`. 
This is merely a duplicate of the `Run Pylint` step with the exception that `Run Pylint Next` uses the latest version of pylint and the latest version of the custom pylint checkers. -This next-pylint environment can also be run locally through tox: +This next-pylint check can also be run locally: - tox run -e next-pylint -c ../../../eng/tox/tox.ini --root + azpysdk pylint --next=True The errors generated by the `Run Pylint Next` step will not break your weekly test pipelines, but make sure to fix the warnings so that your client library is up to date for the next pylint release. # How to prepare your SDK for a new pylint update? -Check each client library's `Run Pylint Next` output in the [test-weekly CI pipeline](https://dev.azure.com/azure-sdk/internal/_build?pipelineNameFilter=python%20*%20tests-weekly). If there is no corresponding test-weekly pipeline, run `next-pylint` locally with `tox` as described in [How to run Pylint?](#how-to-run-pylint). In order to ensure that the SDK pipeline will not break when pylint is updated, make sure to address all pylint warnings present. +Check each client library's `Run Pylint Next` output in the [test-weekly CI pipeline](https://dev.azure.com/azure-sdk/internal/_build?pipelineNameFilter=python%20*%20tests-weekly). If there is no corresponding test-weekly pipeline, run `next-pylint` locally with `azpysdk pylint --next=True .` as described in [How to run Pylint?](#how-to-run-pylint). In order to ensure that the SDK pipeline will not break when pylint is updated, make sure to address all pylint warnings present. diff --git a/doc/dev/sample_guide.md b/doc/dev/sample_guide.md index 6a33ec0394c0..ab6ee5b5df88 100644 --- a/doc/dev/sample_guide.md +++ b/doc/dev/sample_guide.md @@ -54,17 +54,14 @@ The given `START`/`END` keywords can be used in a [sphinx literalinclude][sphinx [Literalinclude example][literalinclude] The rendered code snippets are sensitive to the indentation in the sample file. Sphinx will adjust the dedent accordingly to ensure the sample is captured accurately and not accidentally trimmed. -You can preview how published reference documentation will look by running either -- [tox][tox]: `tox run -e sphinx -c ../../../eng/tox/tox.ini --root `. -- [azpysdk](https://github.com/Azure/azure-sdk-for-python/blob/main/doc/tool_usage_guide.md): run `azpysdk sphinx .` in the package directory. +You can preview how published reference documentation will look by running +[azpysdk](https://github.com/Azure/azure-sdk-for-python/blob/main/doc/tool_usage_guide.md): `azpysdk sphinx .` in the package directory. ## Test run samples in CI live tests Per the [Python guidelines][snippet_guidelines], sample code and snippets should be test run in CI to ensure they remain functional. Samples should be run in the package's live test pipeline which is scheduled to run daily. To ensure samples do get tested as part of regular CI runs, add these [lines][live_tests] to the package's tests.yml. -You can test this CI step locally first with tox or azpysdk: -- To use [tox][tox], run `tox run -e samples -c ../../../eng/tox/tox.ini --root `. -- To use [azpysdk](https://github.com/Azure/azure-sdk-for-python/blob/main/doc/tool_usage_guide.md), run `azpysdk samples .` in the package directory. +You can test this CI step locally first with [azpysdk](https://github.com/Azure/azure-sdk-for-python/blob/main/doc/tool_usage_guide.md) by running `azpysdk samples .` in the package directory. 
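As a concrete companion to the snippet and samples workflow above, here is a minimal sketch of the shape a marked-up sample takes. The file name and function are hypothetical; the load-bearing pieces are the `# [START ...]`/`# [END ...]` markers that `literalinclude` targets, plus a runnable `__main__` entry so the samples check can execute the file directly.

```python
# sample_parse_timestamp.py - hypothetical sample used only for illustration.
from datetime import datetime, timezone


# [START parse_timestamp]
def parse_timestamp(value: str) -> datetime:
    """Parse an ISO-8601 timestamp, treating naive values as UTC."""
    parsed = datetime.fromisoformat(value)
    return parsed if parsed.tzinfo else parsed.replace(tzinfo=timezone.utc)
# [END parse_timestamp]


if __name__ == "__main__":
    # Samples are executed directly, so they should run end to end without arguments.
    print(parse_timestamp("2026-02-07T00:00:48"))
```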
The `Test Samples` step in CI will rely on the resources provisioned and environment variables used for running the package's tests. @@ -95,7 +92,6 @@ For general how-to with the Python SDK, see the [Azure SDK for Python Overview][ [literalinclude]: https://github.com/Azure/azure-sdk-for-python/blob/7b3dfdca0658f6a4706654556d3142b4bce2b0d1/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/_operations/_patch.py#L244-L251 [snippet_guidelines]: https://azure.github.io/azure-sdk/python_design.html#code-snippets [live_tests]: https://github.com/Azure/azure-sdk-for-python/blob/7b3dfdca0658f6a4706654556d3142b4bce2b0d1/sdk/translation/tests.yml#L13-L14 -[tox]: https://github.com/Azure/azure-sdk-for-python/blob/main/doc/dev/tests.md#tox [msft_samples]: https://learn.microsoft.com/samples/browse/ [python_guidelines]: https://azure.github.io/azure-sdk/python_design.html [document_sdk]: https://review.learn.microsoft.com/help/platform/reference-document-sdk-client-libraries?branch=main diff --git a/doc/dev/static_type_checking.md b/doc/dev/static_type_checking.md index 3356636918a6..a936e2dea167 100644 --- a/doc/dev/static_type_checking.md +++ b/doc/dev/static_type_checking.md @@ -176,20 +176,18 @@ The versions of mypy and pyright that we run in CI are pinned to specific versio version of the type checker ships. All client libraries in the Python SDK repo are automatically opted in to running type checking. If you need to temporarily opt-out of type checking for your client library, see [How to opt out of type checking](#how-to-opt-out-of-type-checking). The easiest way to install and run the type checkers locally is -with [tox](https://github.com/Azure/azure-sdk-for-python/blob/main/doc/dev/tests.md#tox). This reproduces the exact type checking -environment run in CI and brings in the third party stub packages necessary. To begin, first install `tox`: - -`pip install tox<5` +with [azpysdk](https://github.com/Azure/azure-sdk-for-python/blob/main/doc/tool_usage_guide.md). This reproduces the exact type checking +environment run in CI and brings in the third party stub packages necessary. To begin, install `azure-sdk-tools` by running `pip install -r dev_requirements.txt` from your package directory. ### Run mypy mypy is currently pinned to version [1.9.0](https://pypi.org/project/mypy/1.9.0/). -To run mypy on your library, run the tox mypy env at the package level: +To run mypy on your library, run `azpysdk mypy` at the package level: -`.../azure-sdk-for-python/sdk/textanalytics/azure-ai-textanalytics>tox run -e mypy -c ../../../eng/tox/tox.ini --root .` +`.../azure-sdk-for-python/sdk/textanalytics/azure-ai-textanalytics> azpysdk mypy .` -If you don't want to use `tox` you can also install and run mypy on its own: +If you don't want to use `azpysdk` you can also install and run mypy on its own: `pip install mypy==1.9.0` @@ -217,11 +215,11 @@ We pin the version of pyright to version [1.1.287](https://github.com/microsoft/ Note that pyright requires that node is installed. The command-line [wrapper package](https://pypi.org/project/pyright/) for pyright will check if node is in the `PATH`, and if not, will download it at runtime. 
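To make the mypy/pyright discussion above concrete, a small self-contained illustration (not drawn from any shipping library) of the class of bug these checkers catch: an `Optional` return value used as if it were always present.

```python
# Illustrative only: both mypy and pyright flag arithmetic on an Optional value.
from typing import Optional


def get_retry_after(headers: dict) -> Optional[float]:
    """Return the retry-after header as seconds, or None when absent."""
    value = headers.get("retry-after")
    return float(value) if value is not None else None


# A checker would reject `get_retry_after({}) + 1.0` because the left-hand
# side may be None; narrowing the Optional first satisfies both tools.
delay = get_retry_after({"retry-after": "3"})
total_wait = (delay or 0.0) + 1.0
```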
-To run pyright on your library, run the tox pyright env at the package level: +To run pyright on your library, run `azpysdk pyright` at the package level: -`.../azure-sdk-for-python/sdk/textanalytics/azure-ai-textanalytics>tox run -e pyright -c ../../../eng/tox/tox.ini --root .` +`.../azure-sdk-for-python/sdk/textanalytics/azure-ai-textanalytics> azpysdk pyright .` -If you don't want to use `tox` you can also install and run pyright on its own: +If you don't want to use `azpysdk` you can also install and run pyright on its own: `pip install pyright==1.1.287` @@ -249,11 +247,11 @@ The report can be used to view where type hints and docstrings are missing in a verifytypes also reports a type completeness score which is the percentage of known types in the library. This score is used in the CI check to fail if the type completeness of the library worsens from the code in the PR vs. the code in main. -To run verifytypes on your library, run the tox verifytypes env at the package level: +To run verifytypes on your library, run `azpysdk verifytypes` at the package level: -`.../azure-sdk-for-python/sdk/textanalytics/azure-ai-textanalytics>tox run -e verifytypes -c ../../../eng/tox/tox.ini --root .` +`.../azure-sdk-for-python/sdk/textanalytics/azure-ai-textanalytics> azpysdk verifytypes .` -If you don't want to use `tox` you can also install and run pyright/verifytypes on its own: +If you don't want to use `azpysdk` you can also install and run pyright/verifytypes on its own: `pip install pyright==1.1.287` diff --git a/doc/dev/tests.md b/doc/dev/tests.md index 8b7470dcd183..9dcccee69ab4 100644 --- a/doc/dev/tests.md +++ b/doc/dev/tests.md @@ -11,7 +11,7 @@ testing infrastructure, and demonstrates how to write and run tests for a servic - [Dependency installation](#dependency-installation) - [Open code in IDE](#open-code-in-ide) - [Integrate with the pytest test framework](#integrate-with-the-pytest-test-framework) - - [Tox](#tox) + - [Running Checks Locally](#running-checks-locally) - [The `devtools_testutils` package](#the-devtools_testutils-package) - [Write or run tests](#write-or-run-tests) - [Set up test resources](#set-up-test-resources) @@ -119,34 +119,33 @@ If you have print statements in your tests for debugging you can add the `-s` fl (env) azure-sdk-for-python\sdk\my-service\my-package> pytest -s ``` -## Tox +## Running Checks Locally -The Python SDK uses the [tox project](https://tox.wiki/en/latest/) to automate releases, run tests, run linters, and build our documentation. The `tox.ini` file is located at `azure-sdk-for-python/eng/tox/tox.ini` for reference. You do not need to make any changes to the tox file for tox to work with your project. Tox will create a directory (`.tox`) in the head of your branch. The first time you run tox commands it may take several moments, but subsequent runs will be quicker. To install tox run the following command from within your virtual environment. -`(env) > pip install "tox<5"`. +The Python SDK uses the `azpysdk` CLI to run linters, type checkers, tests, and build documentation. The `azpysdk` entrypoint is provided by the `eng/tools/azure-sdk-tools` package and is installed as part of each package's `dev_requirements.txt`. For full setup instructions and the list of available checks, see the [Tool Usage Guide](https://github.com/Azure/azure-sdk-for-python/blob/main/doc/tool_usage_guide.md). 
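Before the command list below, it may help to recall what these checks ultimately execute: pytest collection over the package. A minimal, purely illustrative test module of the shape the `whl`/`sdist` checks build, install, and then run; the marker name here mirrors the `-m`/`mark_arg` filtering wired through the checks and is an assumption, not a requirement.

```python
# test_smoke.py - illustrative module; whl/sdist style checks install the
# package and then collect and run files like this with pytest.
import pytest


def test_roundtrip() -> None:
    payload = {"name": "example", "enabled": True}
    assert payload["enabled"] is True


@pytest.mark.live_test_only
def test_requires_live_resources() -> None:
    # Marked tests are included or skipped via the -m / mark_arg plumbing
    # that the checks expose.
    assert True
```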
-To run a tox command from your directory use the following commands: +To run checks from your package directory: ```cmd -(env) azure-sdk-for-python\sdk\my-service\my-package> tox run -e sphinx -c ../../../eng/tox/tox.ini --root . -(env) azure-sdk-for-python\sdk\my-service\my-package> tox run -e pylint -c ../../../eng/tox/tox.ini --root . -(env) azure-sdk-for-python\sdk\my-service\my-package> tox run -e mypy -c ../../../eng/tox/tox.ini --root . -(env) azure-sdk-for-python\sdk\my-service\my-package> tox run -e pyright -c ../../../eng/tox/tox.ini --root . -(env) azure-sdk-for-python\sdk\my-service\my-package> tox run -e verifytypes -c ../../../eng/tox/tox.ini --root . -(env) azure-sdk-for-python\sdk\my-service\my-package> tox run -e whl -c ../../../eng/tox/tox.ini --root . -(env) azure-sdk-for-python\sdk\my-service\my-package> tox run -e sdist -c ../../../eng/tox/tox.ini --root . -(env) azure-sdk-for-python\sdk\my-service\my-package> tox run -e samples -c ../../../eng/tox/tox.ini --root . -(env) azure-sdk-for-python\sdk\my-service\my-package> tox run -e apistub -c ../../../eng/tox/tox.ini --root . +(env) azure-sdk-for-python\sdk\my-service\my-package> azpysdk sphinx . +(env) azure-sdk-for-python\sdk\my-service\my-package> azpysdk pylint . +(env) azure-sdk-for-python\sdk\my-service\my-package> azpysdk mypy . +(env) azure-sdk-for-python\sdk\my-service\my-package> azpysdk pyright . +(env) azure-sdk-for-python\sdk\my-service\my-package> azpysdk verifytypes . +(env) azure-sdk-for-python\sdk\my-service\my-package> azpysdk verifywhl . +(env) azure-sdk-for-python\sdk\my-service\my-package> azpysdk verifysdist . +(env) azure-sdk-for-python\sdk\my-service\my-package> azpysdk samples . +(env) azure-sdk-for-python\sdk\my-service\my-package> azpysdk apistub . ``` -A quick description of the nine commands above: +A quick description of the commands above: - sphinx: documentation generation using the inline comments written in our code -- lint: runs pylint to make sure our code adheres to the style guidance +- pylint: runs pylint to make sure our code adheres to the style guidance - mypy: runs the mypy static type checker for Python to make sure that our types are valid. - pyright: runs the pyright static type checker for Python to make sure that our types are valid. - verifytypes: runs pyright's verifytypes tool to verify the type completeness of the library. 
-- whl: creates a whl package for installing our package -- sdist: creates a zipped distribution of our files that the end user could install with pip +- verifywhl: verifies the wheel contents and manifest +- verifysdist: verifies the sdist contents and manifest - samples: runs all of the samples in the `samples` directory and verifies they are working correctly - apistub: runs the [apistubgenerator](https://github.com/Azure/azure-sdk-tools/tree/main/packages/python-packages/apiview-stub-generator) tool on your code diff --git a/doc/eng_sys_checks.md b/doc/eng_sys_checks.md index 645a1ea48933..634310225865 100644 --- a/doc/eng_sys_checks.md +++ b/doc/eng_sys_checks.md @@ -2,7 +2,7 @@ - [Azure SDK for Python - Engineering System](#azure-sdk-for-python---engineering-system) - [Targeting a specific package at build queue time](#targeting-a-specific-package-at-build-queue-time) - - [Skipping a tox test environment at build queue time](#skipping-a-tox-test-environment-at-build-queue-time) + - [Skipping a check at build queue time](#skipping-a-check-at-build-queue-time) - [Skipping entire sections of builds](#skipping-entire-sections-of-builds) - [The pyproject.toml](#the-pyprojecttoml) - [Coverage Enforcement](#coverage-enforcement) @@ -22,7 +22,7 @@ - [Running locally](#running-locally) - [Change log verification](#change-log-verification) - [PR Validation Checks](#pr-validation-checks) - - [PR validation tox test environments](#pr-validation-tox-test-environments) + - [PR validation checks](#pr-validation-checks-1) - [whl](#whl) - [sdist](#sdist) - [depends](#depends) @@ -38,7 +38,7 @@ There are various tests currently enabled in Azure pipeline for Python SDK and some of them are enabled only for nightly CI checks. We also run some static analysis tool to verify code completeness, security and lint check. -Check the [contributing guide](https://github.com/Azure/azure-sdk-for-python/blob/main/CONTRIBUTING.md#building-and-testing) for an intro to `tox`. For a deeper dive into the tooling that enables the CI checks below and additional detail on reproducing builds locally please refer to the azure-sdk-tools README.md. +Check the [contributing guide](https://github.com/Azure/azure-sdk-for-python/blob/main/CONTRIBUTING.md#building-and-testing) for an intro to `azpysdk`. For a deeper dive into the tooling that enables the CI checks below and additional detail on reproducing builds locally please refer to the [Tool Usage Guide](https://github.com/Azure/azure-sdk-for-python/blob/main/doc/tool_usage_guide.md). As a contributor, you will see the build jobs run in two modes: `Nightly Scheduled` and `Pull Request`. @@ -48,8 +48,8 @@ Example PR build: ![res/job_snippet.png](res/job_snippet.png) -* `Analyze` tox envs run during the `Analyze job. -* `Test _` runs PR/Nightly tox envs, depending on context. +* `Analyze` checks run during the `Analyze` job. +* `Test _` runs PR/Nightly checks, depending on context. ## Targeting a specific package at build queue time @@ -60,20 +60,20 @@ In both `public` and `internal` projects, all builds allow a filter to be introd 1. For example, setting filter string `azure-mgmt-*` will filter a build to only management packages. A value of `azure-keyvault-secrets` will result in only building THAT specific package. 3. Once it's set, run the build! -## Skipping a tox test environment at build queue time +## Skipping a check at build queue time -All build definitions allow choice at queue time as to which `tox` environments actually run during the test phase. 
+All build definitions allow choice at queue time as to which checks actually run during the test phase.
 
 1. Find your target service `internal` build.
 2. Click `Run New`.
-3. Before clicking `run` against `main` or your target commit, click `Variables` and add a variable of name `Run.ToxCustomEnvs`. The value should be a comma separated list of tox environments that you want to run in the test phase.
+3. Before clicking `run` against `main` or your target commit, click `Variables` and add a variable of name `Run.ToxCustomEnvs`. The value should be a comma separated list of checks that you want to run in the test phase.
 4. Once it's set, run the build!
 
-This is an example setting of that narrows the default set from `whl, sdist, depends, latestdependency, minimumdependency`.
+This is an example setting that narrows the default set from `whl, sdist, depends, latestdependency, mindependency`.
 
 ![res/queue_time_variable.png](res/queue_time_variable.png)
 
-Any combination of valid valid tox environments will work. Reference either this document or the file present at `eng/tox/tox.ini` to find what options are available.
+Any combination of valid check names will work. Reference either this document, `azpysdk -h`, or the [Tool Usage Guide](https://github.com/Azure/azure-sdk-for-python/blob/main/doc/tool_usage_guide.md) to find what options are available.
 
 ## Skipping entire sections of builds
 
@@ -126,16 +126,16 @@ We default to **enabling** most of our checks like `pylint`, `mypy`, etc. Due to
 Here's an example:
 
 ```toml
-# from sdk/core/azure-servicemanagement-legacy/pyproject.toml, which is a legacy package
+# from sdk/core/azure-common/pyproject.toml, which is a legacy package
 # as a result, all of these checks are disabled
 [tool.azure-sdk-build]
-mypy = false
 type_check_samples = false
 verifytypes = false
 pyright = false
+mypy = false
 pylint = false
+regression = false
 black = false
-sphinx = false
 ```
 
 If a package does not yet have a `pyproject.toml`, creating one with just the section `[tool.azure-sdk-build]` will do no harm to the release of the package in question.
@@ -154,15 +154,15 @@ After it is implemented, the `relative_cov` key will enable the prevention of **
 
 ## Environment variables important to CI
 
-There are a few differences from a standard local invocation of `tox`. Primarily, these differences adjust the checks to be friendly to parallel invocation. These adjustments are necessary to prevent random CI crashes.
+There are a few differences from a standard local invocation of `azpysdk <check>`. Primarily, these differences adjust the checks to be friendly to parallel invocation. These adjustments are necessary to prevent random CI crashes.
 
 | Environment Variable | Affect on Build |
 |---|---|
-| `TF_BUILD` | EngSys uses the presence of any value in this variable as the bit indicating "in CI" or not. The primary effect of this is that all relative dev dependencies will be prebuilt prior to running the tox environments. |
-| `PREBUILT_WHEEL_DIR` | Setting this env variables means that instead of generating a fresh wheel or sdist to test, `tox` will look in this directory for the targeted package. |
+| `TF_BUILD` | EngSys uses the presence of any value in this variable as the bit indicating "in CI" or not. The primary effect of this is that all relative dev dependencies will be prebuilt prior to running the checks. 
| +| `PREBUILT_WHEEL_DIR` | Setting this env variable means that instead of generating a fresh wheel or sdist to test, the check will look in this directory for the targeted package. | | `PIP_INDEX_URL` | Standard `pip` environment variable. During nightly `alpha` builds, this environment variable is set to a public dev feed. | -The various tooling abstracted by the environments within `eng/tox/tox.ini` take the above variables into account automatically. +The various tooling abstracted by `azpysdk` takes the above variables into account automatically. ### Atomic Overrides @@ -177,7 +177,7 @@ To temporarily **override** this restriction, a dev need only set the queue time This same methodology also applies to _individual checks_ that run during various phases of CI. Developers can use a queue time variable of format `PACKAGE_NAME_CHECK=true/false`. -The name that you should use is visible based on what the `tox environment` that the check refers to! Here are a few examples of enabling/disabling checks: +The name that you should use is visible based on the check name. Here are a few examples of enabling/disabling checks: - `AZURE_SERVICEBUS_PYRIGHT=true` <-- enable a check that normally is disabled in `pyproject.toml` - `AZURE_CORE_PYLINT=false` <-- disable a check that normally runs @@ -188,7 +188,7 @@ You can enable test logging in a pipeline by setting the queue time variable `PY `PYTEST_LOG_LEVEL=INFO` -This also works locally with tox by setting the `PYTEST_LOG_LEVEL` environment variable. +This also works locally by setting the `PYTEST_LOG_LEVEL` environment variable. Note that if you want DEBUG level logging with sensitive information unredacted in the test logs, then you still must pass `logging_enable=True` into the client(s) being used in tests. @@ -201,14 +201,14 @@ Analyze job in both nightly CI and pull request validation pipeline runs a set o [`MyPy`](https://pypi.org/project/mypy/) is a static analysis tool that runs type checking of python package. Following are the steps to run `MyPy` locally for a specific package: 1. Go to root of the package -2. Execute following command: `tox run -e mypy -c ../../../eng/tox/tox.ini --root .` +2. Execute following command: `azpysdk mypy .` ### Pyright [`Pyright`](https://github.com/microsoft/pyright/) is a static analysis tool that runs type checking of python package. Following are the steps to run `pyright` locally for a specific package: 1. Go to root of the package -2. Execute following command: `tox run -e pyright -c ../../../eng/tox/tox.ini --root .` +2. Execute following command: `azpysdk pyright .` ### Verifytypes @@ -216,7 +216,7 @@ Analyze job in both nightly CI and pull request validation pipeline runs a set o [`Verifytypes`](https://github.com/microsoft/pyright/blob/main/docs/typed-libraries.md#verifying-type-completeness) is a feature of pyright that checks the type completeness of a python package. Following are the steps to run `verifytypes` locally for a specific package: 1. Go to root of the package -2. Execute following command: `tox run -e verifytypes -c ../../../eng/tox/tox.ini --root .` +2. Execute following command: `azpysdk verifytypes .` ### Pylint @@ -224,9 +224,9 @@ Analyze job in both nightly CI and pull request validation pipeline runs a set o [`Pylint`](https://pypi.org/project/pylint/) is a static analysis tool to run lint checking, it is automatically run on all PRs. Following are the steps to run `pylint` locally for a specific package. 1. Go to root of the package. -2. 
Execute following command: `tox run -e pylint -c ../../../eng/tox/tox.ini --root .`
+2. Execute following command: `azpysdk pylint .`
 
-Note that the `pylint` environment is configured to run against the **earliest supported python version**. This means that users **must** have `python 3.7` installed on their machine to run this check locally.
+Note that the `pylint` check is configured to run against the **earliest supported python version**. This means that users **must** have `python 3.9` installed on their machine to run this check locally.
 
 ### Sphinx and docstring checker
 
@@ -234,14 +234,14 @@ Note that the `pylint` environment is configured to run against the **earliest s
 fail if docstring are invalid, helping to ensure the resulting documentation will be of high quality. Following are the steps to run `sphinx` locally for a specific package with strict docstring checking:
 
 1. Go to root of the package.
-2. Execute following command: `tox run -e sphinx -c ../../../eng/tox/tox.ini --root .`
+2. Execute following command: `azpysdk sphinx .`
 
 ### Bandit
 
 `Bandit` is static security analysis tool. This check is triggered for all Azure SDK package as part of analyze job. Following are the steps to `Bandit` tool locally for a specific package.
 
 1. Got to package root directory.
-2. Execute command: `tox run -e bandit -c ../../../eng/tox/tox.ini --root .`
+2. Execute command: `azpysdk bandit .`
 
 ### ApiStubGen
 
@@ -267,9 +267,7 @@ to opt into the black invocation.
 
 #### Running locally
 
 1. Go to package root directory.
-2. Execute command: `tox run -e black -c ../../../eng/tox/tox.ini --root . -- .`
-
-**Tip**: You can provide any arguments that `black` accepts after the `--`. Example: `tox run -e black -c ../../../eng/tox/tox.ini --root . -- path/to/file.py`
+2. Execute command: `azpysdk black .`
 
 ### Change log verification
 
@@ -281,39 +279,39 @@ Each pull request runs various tests using `pytest` in addition to all the tests
 
 |`Python Version`|`Platform`  |
 |--|--|
-|2.7|Linux|
-|3.5|Windows|
-|3.8|Linux|
+|3.9|Linux|
+|3.9|Windows|
+|3.13|Linux|
 
-### PR validation tox test environments
+### PR validation checks
 
-Tests are executed using tox environment and following are the tox test names that are part of pull request validation
+Tests are executed as part of pull request validation. Following are the checks that are part of pull request validation:
 
 #### whl
 
-This test installs wheel of the package being tested and runs all tests cases in the package using `pytest`. Following is the command to run this test environment locally.
+This test installs the wheel of the package being tested and runs all test cases in the package using `pytest`. Following is the command to run this check locally.
 
 1. Go to package root folder on a command line
 2. Run following command
-   `tox run -e whl -c ../../../eng/tox/tox.ini --root .`
+   `azpysdk whl .`
 
 #### sdist
 
-This test installs sdist of the package being tested and runs all tests cases in the package using `pytest`. Following is the command to run this test environment locally.
+This test installs the sdist of the package being tested and runs all test cases in the package using `pytest`. Following is the command to run this check locally.
 
 1. Go to package root folder on a command line
 2. Run following command
-   `tox run -e sdist -c ../../../eng/tox/tox.ini --root .`
+   `azpysdk sdist .`
 
 #### depends
 
 The `depends` check ensures all modules in a target package can be successfully imported. 
Actually installing and importing will verify that all package requirements are properly set in setup.py and that the `__all__` set for the package is properly defined. This test installs the package and its required packages, then executes `from <package-root-namespace> import *`. For example from `azure-core`, the following would be invoked: `from azure.core import *`.
 
-Following is the command to run this test environment locally.
+Following is the command to run this check locally.
 
 1. Go to package root folder on a command line
 2. Run following command
-   `tox run -e sdist -c ../../../eng/tox/tox.ini --root .`
+   `azpysdk import_all .`
 
 ## Nightly CI Checks
 
@@ -358,10 +356,10 @@ Note: Any dependency mentioned only in dev_requirements are not considered to id
 4. Install current package that is being tested
 5. Run pytest of all test cases in current package
 
-Tox name of this test is `latestdependency` and steps to manually run this test locally is as follows.
+Steps to manually run this test locally:
 
 1. Go to package root. For e.g azure-storage-blob or azure-identity
-2. Run command `tox run -e latestdependency -c ../../../eng/tox/tox.ini --root .`
+2. Run command `azpysdk latestdependency .`
 
 #### Minimum Dependency Test
 
@@ -374,11 +372,11 @@ Note: Any dependency mentioned only in dev_requirements are not considered to id
 4. Install current package that is being tested
 5. Run pytest of all test cases in current package
 
-Tox name of this test is `mindependency` and steps to manually run this test locally is as follows.
+Steps to manually run this test locally:
 
 1. Go to package root. For e.g azure-storage-blob or azure-identity
 2. Run following command
-`tox run -e mindependency -c ../../../eng/tox/tox.ini --root .`
+`azpysdk mindependency .`
 
 #### Regression Test
 
diff --git a/doc/repo_health_status.md b/doc/repo_health_status.md
index ff573ec6e171..4bdb81179d4d 100644
--- a/doc/repo_health_status.md
+++ b/doc/repo_health_status.md
@@ -76,7 +76,7 @@ This is the overall status of your library and indicates whether you can release
 
 ### Tests - CI (required check):
 
-[Tests - CI](https://github.com/Azure/azure-sdk-for-python/blob/main/doc/eng_sys_checks.md#pr-validation-tox-test-environments) checks the status of the most recent (python - {service-directory})scheduled build of your library's recorded tests. This is the same CI that will run when triggering a release build. To learn more about tests in our repo, see our [Testing Guide](https://github.com/Azure/azure-sdk-for-python/blob/main/doc/dev/tests.md). Possible statuses include:
+[Tests - CI](https://github.com/Azure/azure-sdk-for-python/blob/main/doc/eng_sys_checks.md#pr-validation-checks) checks the status of the most recent (python - {service-directory}) scheduled build of your library's recorded tests. This is the same CI that will run when triggering a release build. To learn more about tests in our repo, see our [Testing Guide](https://github.com/Azure/azure-sdk-for-python/blob/main/doc/dev/tests.md). Possible statuses include:
 
 - $${\color{red}FAIL}$$ - The library is failing CI recorded tests. Check the build result and address the errors present. This will block the release of your library and should be fixed immediately.
 - $${\color{yellow}DISABLED}$$ - The library has its CI disabled due to non-compliance with required checks. Please take action to re-enable and fix all checks highlighted in yellow. Once all checks are fixed, you can remove the `ci_enabled=false` from your library's pyproject.toml file. 
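Since the `depends`/`import_all` behavior described above is easy to misread, here is a minimal sketch of the star-import verification it performs. The helper below is illustrative, not the tool's actual implementation, and the final call assumes `azure-core` is installed locally.

```python
# Illustrative sketch: import a root namespace and confirm that everything
# advertised in __all__ resolves, which is what `from <ns> import *` exercises.
import importlib


def verify_star_import(namespace: str) -> None:
    module = importlib.import_module(namespace)
    exported = getattr(module, "__all__", [])
    missing = [name for name in exported if not hasattr(module, name)]
    if missing:
        raise RuntimeError(f"{namespace}.__all__ references missing names: {missing}")


verify_star_import("azure.core")  # assumes azure-core is installed
```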
diff --git a/doc/tool_usage_guide.md b/doc/tool_usage_guide.md
index 4d6ecf55f865..ab034e6d4db2 100644
--- a/doc/tool_usage_guide.md
+++ b/doc/tool_usage_guide.md
@@ -11,7 +11,7 @@ A `tool` in this context is merely a single entrypoint provided by the `azpysdk`
 
 ## Available Tools
 
-This repo is currently migrating all checks from a slower `tox`-based framework, to a lightweight implementation that uses `asyncio` to simultaneously run checks. This tools list is the current set that has been migrated from `tox` to the `azpysdk` entrypoint.
+The following checks are available via the `azpysdk` entrypoint.
 
 |tool|description|invocation|
 |---|---|---|

From 0632f9d1599620ac632311c718cf5b7fb4c7163a Mon Sep 17 00:00:00 2001
From: Scott Beddall
Date: Sat, 7 Feb 2026 00:05:32 +0000
Subject: [PATCH 75/76] Run.ToxCustomEnvs -> ChecksOverride

---
 doc/eng_sys_checks.md                         |  2 +-
 eng/pipelines/templates/steps/build-test.yml  |  2 +-
 eng/scripts/set_checks.py                     |  2 +-
 .../cosmos-emulator-internal-matrix.json      | 28 +++++++++----------
 sdk/cosmos/cosmos-emulator-public-matrix.json | 16 +++++------
 5 files changed, 25 insertions(+), 25 deletions(-)

diff --git a/doc/eng_sys_checks.md b/doc/eng_sys_checks.md
index 634310225865..2c852817e3dc 100644
--- a/doc/eng_sys_checks.md
+++ b/doc/eng_sys_checks.md
@@ -66,7 +66,7 @@ All build definitions allow choice at queue time as to which checks actually run
 
 1. Find your target service `internal` build.
 2. Click `Run New`.
-3. Before clicking `run` against `main` or your target commit, click `Variables` and add a variable of name `Run.ToxCustomEnvs`. The value should be a comma separated list of checks that you want to run in the test phase.
+3. Before clicking `run` against `main` or your target commit, click `Variables` and add a variable named `ChecksOverride`. The value should be a comma-separated list of checks that you want to run in the test phase.
 4. Once it's set, run the build!
 
 This is an example setting of that narrows the default set from `whl, sdist, depends, latestdependency, mindependency`.

diff --git a/eng/pipelines/templates/steps/build-test.yml b/eng/pipelines/templates/steps/build-test.yml
index decf5040e51f..4bb6c2377d20 100644
--- a/eng/pipelines/templates/steps/build-test.yml
+++ b/eng/pipelines/templates/steps/build-test.yml
@@ -55,7 +55,7 @@ steps:
       scriptPath: 'eng/scripts/set_checks.py'
       arguments: >-
         --unsupported="$(UnsupportedToxEnvironments)"
-        --override="$(Run.ToxCustomEnvs)"
+        --override="$(ChecksOverride)"
         --team-project="$(System.TeamProject)"
 
 - ${{ parameters.BeforeTestSteps }}

diff --git a/eng/scripts/set_checks.py b/eng/scripts/set_checks.py
index 54369d3535c9..865edd1b4777 100644
--- a/eng/scripts/set_checks.py
+++ b/eng/scripts/set_checks.py
@@ -93,7 +93,7 @@ def process_ci_skips(glob_string: str, service: str) -> None:
     "-o",
     "--override",
     dest="override_set",
-    help='If you have a set of tox environments that should override the defaults, provide it here. In CI this is runtime variable $(Run.ToxCustomEnvs). EG: "whl,sdist".',
+    help='If you have a set of checks that should override the defaults, provide it here. In CI this is runtime variable $(ChecksOverride). 
EG: "whl,sdist".', ) parser.add_argument( diff --git a/sdk/cosmos/cosmos-emulator-internal-matrix.json b/sdk/cosmos/cosmos-emulator-internal-matrix.json index 71362ef80275..67c575f5c218 100644 --- a/sdk/cosmos/cosmos-emulator-internal-matrix.json +++ b/sdk/cosmos/cosmos-emulator-internal-matrix.json @@ -19,87 +19,87 @@ "PythonVersion": "3.9", "CoverageArg": "--disablecov", "TestSamples": "false", - "Run.ToxCustomEnvs": "whl,sdist" + "ChecksOverride": "whl,sdist" }, "Emulator Tests Python 3.10 Standard": { "PythonVersion": "3.10", "CoverageArg": "--disablecov", "TestSamples": "false", - "Run.ToxCustomEnvs": "whl,sdist" + "ChecksOverride": "whl,sdist" }, "Emulator Tests Python 3.12 Standard": { "PythonVersion": "3.12", "CoverageArg": "--disablecov", "TestSamples": "false", - "Run.ToxCustomEnvs": "whl,sdist" + "ChecksOverride": "whl,sdist" }, "Emulator Tests Python 3.13 Standard": { "PythonVersion": "3.13", "CoverageArg": "--disablecov", "TestSamples": "false", - "Run.ToxCustomEnvs": "whl,sdist" + "ChecksOverride": "whl,sdist" }, "Emulator Tests Python 3.9 Special": { "PythonVersion": "3.9", "CoverageArg": "--disablecov", "TestSamples": "false", - "Run.ToxCustomEnvs": "depends,whl_no_aio" + "ChecksOverride": "depends,whl_no_aio" }, "Emulator Tests Python 3.10 Special": { "PythonVersion": "3.10", "CoverageArg": "--disablecov", "TestSamples": "false", - "Run.ToxCustomEnvs": "depends,whl_no_aio" + "ChecksOverride": "depends,whl_no_aio" }, "Emulator Tests Python 3.12 Special": { "PythonVersion": "3.12", "CoverageArg": "--disablecov", "TestSamples": "false", - "Run.ToxCustomEnvs": "depends,whl_no_aio" + "ChecksOverride": "depends,whl_no_aio" }, "Emulator Tests Python 3.13 Special": { "PythonVersion": "3.13", "CoverageArg": "--disablecov", "TestSamples": "false", - "Run.ToxCustomEnvs": "depends,whl_no_aio" + "ChecksOverride": "depends,whl_no_aio" }, "Emulator Tests Python 3.14 Special": { "PythonVersion": "3.14", "CoverageArg": "--disablecov", "TestSamples": "false", - "Run.ToxCustomEnvs": "depends,whl_no_aio" + "ChecksOverride": "depends,whl_no_aio" }, "Emulator Tests Python 3.9 Dependency Checks": { "PythonVersion": "3.9", "CoverageArg": "--disablecov", "TestSamples": "false", - "Run.ToxCustomEnvs": "latestdependency,mindependency" + "ChecksOverride": "latestdependency,mindependency" }, "Emulator Tests Python 3.10 Dependency Checks": { "PythonVersion": "3.10", "CoverageArg": "--disablecov", "TestSamples": "false", - "Run.ToxCustomEnvs": "latestdependency,mindependency" + "ChecksOverride": "latestdependency,mindependency" }, "Emulator Tests Python 3.12 Dependency Checks": { "PythonVersion": "3.12", "CoverageArg": "--disablecov", "TestSamples": "false", - "Run.ToxCustomEnvs": "latestdependency,mindependency" + "ChecksOverride": "latestdependency,mindependency" }, "Emulator Tests Python 3.13 Dependency Checks": { "PythonVersion": "3.13", "CoverageArg": "--disablecov", "TestSamples": "false", - "Run.ToxCustomEnvs": "latestdependency,mindependency" + "ChecksOverride": "latestdependency,mindependency" }, "Emulator Tests Python 3.14 Dependency Checks": { "PythonVersion": "3.14", "CoverageArg": "--disablecov", "TestSamples": "false", - "Run.ToxCustomEnvs": "latestdependency,mindependency" + "ChecksOverride": "latestdependency,mindependency" } } } diff --git a/sdk/cosmos/cosmos-emulator-public-matrix.json b/sdk/cosmos/cosmos-emulator-public-matrix.json index f78ca6ef6654..3278fe204843 100644 --- a/sdk/cosmos/cosmos-emulator-public-matrix.json +++ b/sdk/cosmos/cosmos-emulator-public-matrix.json @@ -19,50 
+19,50 @@ "PythonVersion": "3.10", "CoverageArg": "--disablecov", "TestSamples": "false", - "Run.ToxCustomEnvs": "whl,sdist" + "ChecksOverride": "whl,sdist" }, "Emulator Tests Python 3.12 Standard": { "PythonVersion": "3.12", "CoverageArg": "--disablecov", "TestSamples": "false", - "Run.ToxCustomEnvs": "whl,sdist" + "ChecksOverride": "whl,sdist" }, "Emulator Tests Python 3.13 Standard": { "PythonVersion": "3.13", "CoverageArg": "--disablecov", "TestSamples": "false", - "Run.ToxCustomEnvs": "whl,sdist" + "ChecksOverride": "whl,sdist" }, "Emulator Tests Python 3.14 Standard": { "PythonVersion": "3.14", "CoverageArg": "--disablecov", "TestSamples": "false", - "Run.ToxCustomEnvs": "whl,sdist" + "ChecksOverride": "whl,sdist" }, "Emulator Tests Python 3.9 Dependency Checks": { "PythonVersion": "3.9", "CoverageArg": "--disablecov", "TestSamples": "false", - "Run.ToxCustomEnvs": "mindependency" + "ChecksOverride": "mindependency" }, "Emulator Tests Python 3.10 Dependency Checks": { "PythonVersion": "3.10", "CoverageArg": "--disablecov", "TestSamples": "false", - "Run.ToxCustomEnvs": "mindependency" + "ChecksOverride": "mindependency" }, "Emulator Tests Python 3.12 Dependency Checks": { "PythonVersion": "3.12", "CoverageArg": "--disablecov", "TestSamples": "false", - "Run.ToxCustomEnvs": "mindependency" + "ChecksOverride": "mindependency" }, "Emulator Tests Python 3.13 Dependency Checks": { "PythonVersion": "3.13", "CoverageArg": "--disablecov", "TestSamples": "false", - "Run.ToxCustomEnvs": "mindependency" + "ChecksOverride": "mindependency" } } } From e0c30ed9af8021ba8529083c9e51e8be087ea10b Mon Sep 17 00:00:00 2001 From: Scott Beddall Date: Sat, 7 Feb 2026 00:13:36 +0000 Subject: [PATCH 76/76] remove any and all references to the build extra on azure-sdk-tools --- .github/workflows/azure-sdk-tools.yml | 8 ++++---- doc/dev/conda-builds.md | 2 +- eng/pipelines/templates/jobs/build-conda-dependencies.yml | 2 +- eng/pipelines/templates/steps/build-conda-artifacts.yml | 2 +- scripts/auto_release/PythonSdkLiveTest.yml | 2 +- 5 files changed, 8 insertions(+), 8 deletions(-) diff --git a/.github/workflows/azure-sdk-tools.yml b/.github/workflows/azure-sdk-tools.yml index 00b6d99ba530..bc9f87afa3da 100644 --- a/.github/workflows/azure-sdk-tools.yml +++ b/.github/workflows/azure-sdk-tools.yml @@ -21,7 +21,7 @@ jobs: - name: Install azure-sdk-tools run: | - python -m pip install -e eng/tools/azure-sdk-tools[build,ghtools,conda] + python -m pip install -e eng/tools/azure-sdk-tools[ghtools,conda] python -m pip freeze shell: bash @@ -43,7 +43,7 @@ jobs: - name: Install azure-sdk-tools run: | - python -m pip install -e eng/tools/azure-sdk-tools[build,ghtools,conda] + python -m pip install -e eng/tools/azure-sdk-tools[ghtools,conda] python -m pip install black==24.4.0 python -m pip freeze shell: bash @@ -70,7 +70,7 @@ jobs: - name: Install azure-sdk-tools on in global uv, discover azpysdk checks run: | - uv pip install --system eng/tools/azure-sdk-tools[build,ghtools,conda,systemperf] + uv pip install --system eng/tools/azure-sdk-tools[ghtools,conda,systemperf] # Discover available azpysdk commands from the {command1,command2,...} line in help output CHECKS=$(azpysdk -h 2>&1 | \ @@ -100,7 +100,7 @@ jobs: - name: Install azure-sdk-tools on global pip env run: | - python -m pip install -e eng/tools/azure-sdk-tools[build,ghtools,conda] + python -m pip install -e eng/tools/azure-sdk-tools[ghtools,conda] shell: bash - name: Run all discovered checks against azure-template using pip as package manager diff --git 
a/doc/dev/conda-builds.md b/doc/dev/conda-builds.md index 413257320fea..72e845cea1eb 100644 --- a/doc/dev/conda-builds.md +++ b/doc/dev/conda-builds.md @@ -22,7 +22,7 @@ Follow the instructions [here](https://docs.conda.io/projects/conda-build/en/lat ```bash # cd -pip install "eng/tools/azure-sdk-tools[build,conda]" +pip install "eng/tools/azure-sdk-tools[conda]" ``` ### Get the configuration blob diff --git a/eng/pipelines/templates/jobs/build-conda-dependencies.yml b/eng/pipelines/templates/jobs/build-conda-dependencies.yml index c1cc771994c7..64d13327bfe2 100644 --- a/eng/pipelines/templates/jobs/build-conda-dependencies.yml +++ b/eng/pipelines/templates/jobs/build-conda-dependencies.yml @@ -28,7 +28,7 @@ jobs: - pwsh: | $ErrorActionPreference = 'Stop' $PSNativeCommandUseErrorActionPreference = $true - pip install "eng/tools/azure-sdk-tools[build,conda]" + pip install "eng/tools/azure-sdk-tools[conda]" pip install disutils Invoke-WebRequest "$(VS_INSTALLER_URL)" -OutFile "$(VS_INSTALLER_PATH)" # In order of component appearance in the install command below, these are the names of the components diff --git a/eng/pipelines/templates/steps/build-conda-artifacts.yml b/eng/pipelines/templates/steps/build-conda-artifacts.yml index 734f3f67f62a..57a0bb875375 100644 --- a/eng/pipelines/templates/steps/build-conda-artifacts.yml +++ b/eng/pipelines/templates/steps/build-conda-artifacts.yml @@ -18,7 +18,7 @@ steps: - pwsh: | $ErrorActionPreference = 'Stop' $PSNativeCommandUseErrorActionPreference = $true - python -m pip install "eng/tools/azure-sdk-tools[build,conda]" + python -m pip install "eng/tools/azure-sdk-tools[conda]" python -m pip install disutils python -m pip install typing-extensions==4.12.2 displayName: Install build script requirements diff --git a/scripts/auto_release/PythonSdkLiveTest.yml b/scripts/auto_release/PythonSdkLiveTest.yml index bebc4c1d587c..6ca6dcd57580 100644 --- a/scripts/auto_release/PythonSdkLiveTest.yml +++ b/scripts/auto_release/PythonSdkLiveTest.yml @@ -97,7 +97,7 @@ jobs: export ISSUE_OWNER=$(ISSUE_OWNER) # install azure-sdk-tools - python -m pip install $root_path/eng/tools/azure-sdk-tools[build,ghtools,sdkgenerator] + python -m pip install $root_path/eng/tools/azure-sdk-tools[ghtools,sdkgenerator] # install requirements python -m pip install -r $script_path/requirement.txt
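As a reference for the `ChecksOverride` contract exercised in the matrix files and the `set_checks.py` change above, the sketch below shows one way a comma-separated override value could be normalized into individual check names. The helper name `parse_override` is an assumption for illustration; it is not the actual code in `eng/scripts/set_checks.py`.

```python
# Hedged sketch: split a queue-time ChecksOverride value such as
# "whl,sdist" into check names. parse_override is hypothetical.
def parse_override(raw: str) -> list[str]:
    """Normalize a comma-separated override string into check names."""
    return [check.strip() for check in raw.split(",") if check.strip()]


if __name__ == "__main__":
    assert parse_override("whl,sdist") == ["whl", "sdist"]
    assert parse_override(" whl , sdist , ") == ["whl", "sdist"]
    assert parse_override("") == []
```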