Commit 177efe0

updates to dependency checking
1 parent f709a23 commit 177efe0

File tree

4 files changed: +359, -396 lines changed


eng/tools/azure-sdk-tools/azpysdk/dependency_check.py

Lines changed: 10 additions & 21 deletions
@@ -8,6 +8,7 @@
 
 from ci_tools.functions import install_into_venv, is_error_code_5_allowed
 from ci_tools.scenario.generation import create_package_and_install
+from ci_tools.scenario.dependency_resolution import install_dependent_packages
 from ci_tools.variables import discover_repo_root, set_envvar_defaults
 from ci_tools.logging import logger
 
@@ -64,28 +65,16 @@ def run(self, args: argparse.Namespace) -> int:
                 results.append(exc.returncode)
                 continue
 
-            install_script = os.path.join(REPO_ROOT, "eng/tox/install_depend_packages.py")
-            install_command = [
-                install_script,
-                "-t",
-                package_dir,
-                "-d",
-                self.dependency_type,
-                "-w",
-                staging_directory,
-            ]
-            install_result = self.run_venv_command(
-                executable,
-                install_command,
-                cwd=package_dir,
-                immediately_dump=True,
-            )
-
-            if install_result.returncode != 0:
-                logger.error(
-                    f"install_depend_packages.py failed for {package_name} with exit code {install_result.returncode}."
+            try:
+                install_dependent_packages(
+                    setup_py_file_path=package_dir,
+                    dependency_type=self.dependency_type,
+                    temp_dir=staging_directory,
+                    python_executable=executable,
                 )
-                results.append(install_result.returncode)
+            except Exception as exc:  # pragma: no cover - defensive logging
+                logger.error(f"Dependency resolution failed for {package_name}: {exc}")
+                results.append(1)
                 continue
 
             try:
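
The net change in this file: the azpysdk dependency check now calls install_dependent_packages from ci_tools.scenario.dependency_resolution in-process instead of shelling out to eng/tox/install_depend_packages.py. A minimal sketch of an equivalent standalone invocation, assuming ci_tools is importable; the paths below are hypothetical placeholders, not values from this commit:

    from ci_tools.scenario.dependency_resolution import install_dependent_packages

    # Hypothetical paths, for illustration only.
    package_dir = "sdk/core/azure-core"
    staging_directory = "/tmp/dependency_staging"

    # Resolve and install either the "Minimum" or "Latest" dependency set for the package.
    # python_executable defaults to sys.executable when omitted.
    install_dependent_packages(
        setup_py_file_path=package_dir,
        dependency_type="Minimum",
        temp_dir=staging_directory,
    )
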
Lines changed: 338 additions & 0 deletions (new file)
@@ -0,0 +1,338 @@
"""Utilities for resolving dependency sets for tox-style checks.

This module contains the logic previously hosted in ``eng/tox/install_depend_packages.py``
so that both the legacy tox entry point and the azpysdk checks can share a
single implementation.
"""

import logging
import os
import re
import subprocess
import sys
from typing import Callable, List, Optional

from packaging.requirements import Requirement
from packaging.specifiers import SpecifierSet
from packaging.version import Version
from pypi_tools.pypi import PyPIClient

from ci_tools.functions import (
    compare_python_version,
    get_pip_command,
    handle_incompatible_minimum_dev_reqs,
)
from ci_tools.parsing import ParsedSetup, parse_require

logger = logging.getLogger(__name__)

DEV_REQ_FILE = "dev_requirements.txt"
NEW_DEV_REQ_FILE = "new_dev_requirements.txt"
PKGS_TXT_FILE = "packages.txt"

# GENERIC_OVERRIDES dictionaries pair a specific dependency with a MINIMUM or MAXIMUM inclusive bound.
# During LATEST and MINIMUM dependency checks, we sometimes need to ignore versions for various compatibility
# reasons.
MINIMUM_VERSION_GENERIC_OVERRIDES = {
    "azure-common": "1.1.10",
    "msrest": "0.6.10",
    "typing-extensions": "4.6.0",
    "opentelemetry-api": "1.3.0",
    "opentelemetry-sdk": "1.3.0",
    "azure-core": "1.11.0",
    "requests": "2.19.0",
    "six": "1.12.0",
    "cryptography": "41.0.0",
    "msal": "1.23.0",
    "azure-storage-file-datalake": "12.2.0",
}

MAXIMUM_VERSION_GENERIC_OVERRIDES = {}

# SPECIFIC OVERRIDES provide additional filtering of upper and lower bound by
# binding an override to the specific package being processed. As an example, when
# processing the latest or minimum deps for "azure-eventhub", the minimum version of "azure-core"
# will be overridden to 1.25.0.
MINIMUM_VERSION_SPECIFIC_OVERRIDES = {
    "azure-eventhub": {"azure-core": "1.25.0"},
    "azure-eventhub-checkpointstoreblob-aio": {"azure-core": "1.25.0", "azure-eventhub": "5.11.0"},
    "azure-eventhub-checkpointstoreblob": {"azure-core": "1.25.0", "azure-eventhub": "5.11.0"},
    "azure-eventhub-checkpointstoretable": {"azure-core": "1.25.0", "azure-eventhub": "5.11.0"},
    "azure-identity": {"msal": "1.23.0"},
    "azure-core-tracing-opentelemetry": {"azure-core": "1.28.0"},
    "azure-storage-file-datalake": {"azure-storage-blob": "12.22.0"},
    "azure-cosmos": {"azure-core": "1.30.0"},
    "azure-appconfiguration-provider": {"azure-appconfiguration": "1.7.2"},
    "azure-ai-evaluation": {"aiohttp": "3.8.6"},
}

MAXIMUM_VERSION_SPECIFIC_OVERRIDES = {}

# PLATFORM SPECIFIC OVERRIDES provide additional generic (EG not tied to the package whose dependencies are being processed)
# filtering on a _per platform_ basis. Primarily used to limit certain packages due to platform compatibility.
PLATFORM_SPECIFIC_MINIMUM_OVERRIDES = {
    ">=3.14.0": {
        "typing-extensions": "4.15.0",
    },
    ">=3.12.0": {
        "azure-core": "1.23.1",
        "aiohttp": "3.9.0",
        "six": "1.16.0",
        "requests": "2.30.0",
    },
    ">=3.13.0": {
        "typing-extensions": "4.13.0",
        "aiohttp": "3.10.6",
    },
}

PLATFORM_SPECIFIC_MAXIMUM_OVERRIDES = {}

# This is used to actively _add_ requirements to the install set. These are used to actively inject
# a new requirement specifier to the set of packages being installed.
SPECIAL_CASE_OVERRIDES = {
    # this package has an override
    "azure-core": {
        # if the version being installed matches this specifier, add the listed packages to the install list
        "<1.24.0": ["msrest<0.7.0"],
    }
}

__all__ = [
    "install_dependent_packages",
    "filter_dev_requirements",
    "find_released_packages",
]


def install_dependent_packages(
    setup_py_file_path: str,
    dependency_type: str,
    temp_dir: str,
    python_executable: Optional[str] = None,
) -> None:
    """Identify and install the dependency set for a package.

    :param setup_py_file_path: Path to the target package directory.
    :param dependency_type: Either ``"Latest"`` or ``"Minimum"``.
    :param temp_dir: Directory where temporary artifacts (e.g. filtered requirements, packages.txt) are written.
    :param python_executable: Optional interpreter whose environment should receive the installations. Defaults to
        the current ``sys.executable``.
    """

    python_exe = python_executable or sys.executable

    released_packages = find_released_packages(setup_py_file_path, dependency_type)
    override_added_packages: List[str] = []

    for pkg_spec in released_packages:
        override_added_packages.extend(check_pkg_against_overrides(pkg_spec))

    logger.info("%s released packages: %s", dependency_type, released_packages)

    additional_filter_fn: Optional[Callable[[str, List[str], List[Requirement]], List[str]]] = None
    if dependency_type == "Minimum":
        additional_filter_fn = handle_incompatible_minimum_dev_reqs

    dev_req_file_path = filter_dev_requirements(
        setup_py_file_path, released_packages, temp_dir, additional_filter_fn
    )

    if override_added_packages:
        logger.info("Expanding the requirement set by the packages %s.", override_added_packages)

    install_set = released_packages + list(set(override_added_packages))

    if install_set or dev_req_file_path:
        install_packages(install_set, dev_req_file_path, python_exe)

    if released_packages:
        pkgs_file_path = os.path.join(temp_dir, PKGS_TXT_FILE)
        with open(pkgs_file_path, "w", encoding="utf-8") as pkgs_file:
            for package in released_packages:
                pkgs_file.write(package + "\n")
        logger.info("Created file %s to track azure packages found on PyPI", pkgs_file_path)


def check_pkg_against_overrides(pkg_specifier: str) -> List[str]:
    """Apply ``SPECIAL_CASE_OVERRIDES`` for a resolved package specifier."""

    additional_installs: List[str] = []
    target_package, target_version = pkg_specifier.split("==")

    target_version_obj = Version(target_version)
    if target_package in SPECIAL_CASE_OVERRIDES:
        for specifier_set, extras in SPECIAL_CASE_OVERRIDES[target_package].items():
            spec = SpecifierSet(specifier_set)
            if target_version_obj in spec:
                additional_installs.extend(extras)

    return additional_installs


def find_released_packages(setup_py_path: str, dependency_type: str) -> List[str]:
    """Resolve the appropriate released dependency versions for a package."""

    pkg_info = ParsedSetup.from_path(setup_py_path)
    requires = [r for r in pkg_info.requires if "-nspkg" not in r]
    available_packages = [
        spec for spec in map(lambda req: process_requirement(req, dependency_type, pkg_info.name), requires) if spec
    ]
    return available_packages


def process_bounded_versions(originating_pkg_name: str, pkg_name: str, versions: List[str]) -> List[str]:
    """Apply generic, platform, and package-specific bounds to the available versions list."""

    if pkg_name in MINIMUM_VERSION_GENERIC_OVERRIDES:
        versions = [
            v for v in versions if Version(v) >= Version(MINIMUM_VERSION_GENERIC_OVERRIDES[pkg_name])
        ]

    for platform_bound, restrictions in PLATFORM_SPECIFIC_MINIMUM_OVERRIDES.items():
        if compare_python_version(platform_bound) and pkg_name in restrictions:
            versions = [v for v in versions if Version(v) >= Version(restrictions[pkg_name])]

    if (
        originating_pkg_name in MINIMUM_VERSION_SPECIFIC_OVERRIDES
        and pkg_name in MINIMUM_VERSION_SPECIFIC_OVERRIDES[originating_pkg_name]
    ):
        versions = [
            v
            for v in versions
            if Version(v) >= Version(MINIMUM_VERSION_SPECIFIC_OVERRIDES[originating_pkg_name][pkg_name])
        ]

    if pkg_name in MAXIMUM_VERSION_GENERIC_OVERRIDES:
        versions = [
            v for v in versions if Version(v) <= Version(MAXIMUM_VERSION_GENERIC_OVERRIDES[pkg_name])
        ]

    for platform_bound, restrictions in PLATFORM_SPECIFIC_MAXIMUM_OVERRIDES.items():
        if compare_python_version(platform_bound) and pkg_name in restrictions:
            versions = [v for v in versions if Version(v) <= Version(restrictions[pkg_name])]

    if (
        originating_pkg_name in MAXIMUM_VERSION_SPECIFIC_OVERRIDES
        and pkg_name in MAXIMUM_VERSION_SPECIFIC_OVERRIDES[originating_pkg_name]
    ):
        versions = [
            v
            for v in versions
            if Version(v) <= Version(MAXIMUM_VERSION_SPECIFIC_OVERRIDES[originating_pkg_name][pkg_name])
        ]

    return versions


def process_requirement(req: str, dependency_type: str, orig_pkg_name: str) -> str:
    """Determine the matching version for a requirement based on dependency type."""

    requirement = parse_require(req)
    pkg_name = requirement.name
    spec = requirement.specifier if len(requirement.specifier) else None

    if not (requirement.marker is None or requirement.marker.evaluate()):
        logger.info(
            "Skipping requirement %r. Environment marker %r does not apply to current environment.",
            req,
            str(requirement.marker),
        )
        return ""

    client = PyPIClient()
    versions = [str(v) for v in client.get_ordered_versions(pkg_name, True)]
    logger.info("Versions available on PyPI for %s: %s", pkg_name, versions)

    versions = process_bounded_versions(orig_pkg_name, pkg_name, versions)

    if dependency_type == "Latest":
        versions.reverse()

    for version in versions:
        if spec is None or version in spec:
            logger.info(
                "Found %s version %s that matches specifier %s",
                dependency_type,
                version,
                spec,
            )
            return pkg_name + "==" + version

    logger.error("No version is found on PyPI for package %s that matches specifier %s", pkg_name, spec)
    return ""


def check_req_against_exclusion(req: str, req_to_exclude: str) -> bool:
    """Return ``True`` if the dev requirement matches the package slated for exclusion."""

    req_id = ""
    for char in req:
        if re.match(r"[A-Za-z0-9_-]", char):
            req_id += char
        else:
            break

    return req_id == req_to_exclude


def filter_dev_requirements(
    package_directory: str,
    released_packages: List[str],
    temp_dir: str,
    additional_filter_fn: Optional[Callable[[str, List[str], List[Requirement]], List[str]]] = None,
) -> str:
    """Filter dev requirements to avoid reinstalling packages we just resolved."""

    dev_req_path = os.path.join(package_directory, DEV_REQ_FILE)
    with open(dev_req_path, "r", encoding="utf-8") as dev_req_file:
        requirements = dev_req_file.readlines()

    released_packages_parsed = [parse_require(p) for p in released_packages]
    released_package_names = [p.name for p in released_packages_parsed]

    prebuilt_dev_reqs = [os.path.basename(req.replace("\n", "")) for req in requirements if os.path.sep in req]
    req_to_exclude = [
        req for req in prebuilt_dev_reqs if req.split("-")[0].replace("_", "-") in released_package_names
    ]
    req_to_exclude.extend(released_package_names)

    filtered_req = [
        req
        for req in requirements
        if os.path.basename(req.replace("\n", "")) not in req_to_exclude
        and not any(check_req_against_exclusion(req, item) for item in req_to_exclude)
    ]

    if additional_filter_fn:
        filtered_req = additional_filter_fn(package_directory, filtered_req, released_packages_parsed)

    logger.info("Filtered dev requirements: %s", filtered_req)

    new_dev_req_path = ""
    if filtered_req:
        new_dev_req_path = os.path.join(temp_dir, NEW_DEV_REQ_FILE)
        with open(new_dev_req_path, "w", encoding="utf-8") as dev_req_file:
            dev_req_file.writelines(line if line.endswith("\n") else line + "\n" for line in filtered_req)

    return new_dev_req_path


def install_packages(packages: List[str], req_file: str, python_executable: str) -> None:
    """Install resolved packages (and optionally a requirements file) into the target environment."""

    python_exe = python_executable or sys.executable
    commands = get_pip_command(python_exe)
    commands.append("install")

    if commands[0] == "uv":
        commands.extend(["--python", python_exe])

    if packages:
        commands.extend(packages)

    if req_file:
        commands.extend(["-r", req_file])

    logger.info("Installing packages. Command: %s", commands)
    subprocess.check_call(commands)
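
A minimal usage sketch of the new module's helpers, assuming ci_tools is importable; the target package path is a hypothetical placeholder and the temporary directory is used only for illustration:

    import tempfile

    from ci_tools.scenario.dependency_resolution import (
        check_pkg_against_overrides,
        find_released_packages,
        install_dependent_packages,
    )

    pkg_path = "sdk/identity/azure-identity"  # hypothetical target package directory

    # Inspect which pinned specifiers would be selected, without installing anything.
    print(find_released_packages(pkg_path, "Minimum"))

    # SPECIAL_CASE_OVERRIDES in action: an azure-core pin below 1.24.0 adds "msrest<0.7.0".
    print(check_pkg_against_overrides("azure-core==1.23.0"))

    # Resolve, filter dev requirements, and install into the current interpreter.
    with tempfile.TemporaryDirectory() as scratch:
        install_dependent_packages(
            setup_py_file_path=pkg_path,
            dependency_type="Minimum",
            temp_dir=scratch,
        )

"Latest" works the same way, except process_requirement walks the PyPI version list from newest to oldest before returning the first version that satisfies the requirement's specifier.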
