Skip to content

Commit

Permalink
Improvements to mindependency dev_requirement conflict resolution (#37669)
Browse files Browse the repository at this point in the history

* during mindependency runs, dev_requirements on local relative paths are now checked for conflict with the targeted set of minimum dependencies
* multiple type clarifications within azure-sdk-tools
* added tests for new conflict resolution logic

---------

Co-authored-by: McCoy Patiño <39780829+mccoyp@users.noreply.github.com>
  • Loading branch information
scbedd and mccoyp authored Oct 4, 2024
1 parent fc972c5 commit 1349cd0
Show file tree
Hide file tree
Showing 8 changed files with 315 additions and 64 deletions.
74 changes: 38 additions & 36 deletions eng/tox/install_depend_packages.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,21 +5,21 @@
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------


import argparse
import os
import sys
import logging
import re

from subprocess import check_call
from typing import TYPE_CHECKING
from typing import TYPE_CHECKING, Callable, Optional
from pkg_resources import parse_version, Requirement
from pypi_tools.pypi import PyPIClient
from packaging.specifiers import SpecifierSet
from packaging.version import Version, parse
from packaging.version import Version

from ci_tools.parsing import ParsedSetup, parse_require
from ci_tools.functions import compare_python_version
from ci_tools.functions import compare_python_version, handle_incompatible_minimum_dev_reqs

from typing import List

Expand Down Expand Up @@ -59,20 +59,15 @@
"azure-eventhub-checkpointstoretable": {"azure-core": "1.25.0", "azure-eventhub": "5.11.0"},
"azure-identity": {"msal": "1.23.0"},
"azure-core-tracing-opentelemetry": {"azure-core": "1.28.0"},
"azure-storage-file-datalake": {"azure-storage-blob": "12.22.0"}
"azure-storage-file-datalake": {"azure-storage-blob": "12.22.0"},
}

MAXIMUM_VERSION_SPECIFIC_OVERRIDES = {}

# PLATFORM SPECIFIC OVERRIDES provide additional generic (e.g. not tied to the package whose dependencies are being processed)
# filtering on a _per platform_ basis. Primarily used to limit certain packages due to platform compatibility
PLATFORM_SPECIFIC_MINIMUM_OVERRIDES = {
">=3.12.0": {
"azure-core": "1.23.1",
"aiohttp": "3.8.6",
"six": "1.16.0",
"requests": "2.30.0"
}
">=3.12.0": {"azure-core": "1.23.1", "aiohttp": "3.8.6", "six": "1.16.0", "requests": "2.30.0"}
}

PLATFORM_SPECIFIC_MAXIMUM_OVERRIDES = {}
Expand Down Expand Up @@ -101,8 +96,15 @@ def install_dependent_packages(setup_py_file_path, dependency_type, temp_dir):
override_added_packages.extend(check_pkg_against_overrides(pkg_spec))

logging.info("%s released packages: %s", dependency_type, released_packages)
# filter released packages from dev_requirements and create a new file "new_dev_requirements.txt"
dev_req_file_path = filter_dev_requirements(setup_py_file_path, released_packages, temp_dir, dependency_type)

additional_filter_fn = None
if dependency_type == "Minimum":
additional_filter_fn = handle_incompatible_minimum_dev_reqs

# before september 2024, filter_dev_requirements only would remove any packages present in released_packages from the dev_requirements,
# then create a new file "new_dev_requirements.txt" without the problematic packages.
# after september 2024, filter_dev_requirements will also check for **compatibility** with the packages being installed when filtering the dev_requirements.
dev_req_file_path = filter_dev_requirements(setup_py_file_path, released_packages, temp_dir, additional_filter_fn)

if override_added_packages:
logging.info(f"Expanding the requirement set by the packages {override_added_packages}.")
Expand Down Expand Up @@ -157,6 +159,7 @@ def find_released_packages(setup_py_path, dependency_type):

return avlble_packages


def process_bounded_versions(originating_pkg_name: str, pkg_name: str, versions: List[str]) -> List[str]:
"""
Processes a target package based on an originating package (target is a dep of originating) and the versions available from pypi for the target package.
Expand All @@ -180,9 +183,7 @@ def process_bounded_versions(originating_pkg_name: str, pkg_name: str, versions:
restrictions = PLATFORM_SPECIFIC_MINIMUM_OVERRIDES[platform_bound]

if pkg_name in restrictions:
versions = [
v for v in versions if parse_version(v) >= parse_version(restrictions[pkg_name])
]
versions = [v for v in versions if parse_version(v) >= parse_version(restrictions[pkg_name])]

# lower bound package-specific
if (
Expand All @@ -207,9 +208,7 @@ def process_bounded_versions(originating_pkg_name: str, pkg_name: str, versions:
restrictions = PLATFORM_SPECIFIC_MAXIMUM_OVERRIDES[platform_bound]

if pkg_name in restrictions:
versions = [
v for v in versions if parse_version(v) <= parse_version(restrictions[pkg_name])
]
versions = [v for v in versions if parse_version(v) <= parse_version(restrictions[pkg_name])]

# upper bound package-specific
if (
Expand Down Expand Up @@ -249,7 +248,6 @@ def process_requirement(req, dependency_type, orig_pkg_name):

# think of the various versions that come back from pypi as the top of a funnel
# We apply generic overrides -> platform specific overrides -> package specific overrides

versions = process_bounded_versions(orig_pkg_name, pkg_name, versions)

# Search from lowest to latest in case of finding minimum dependency
Expand Down Expand Up @@ -301,17 +299,20 @@ def check_req_against_exclusion(req, req_to_exclude):

return req_id == req_to_exclude

# todo: remove when merging #37450
def replace_identity(dev_requirement_line) -> str:
    """Pin any azure-identity dev requirement line to version 1.17.0.

    Matches both the dashed and underscored spellings (``azure-identity`` /
    ``azure_identity``). Lines that do not reference azure-identity are
    returned untouched.
    """
    if re.search(r"azure[-_]identity", dev_requirement_line):
        return "azure-identity==1.17.0\n"
    return dev_requirement_line

def filter_dev_requirements(
setup_py_path,
released_packages,
temp_dir,
additional_filter_fn: Optional[Callable[[str, List[str], List[Requirement]], List[str]]] = None,
):
"""
This function takes an existing package path, a list of specific package specifiers that we have resolved, a temporary directory to write
the modified dev_requirements to, and an optional additional_filter_fn that can be used to further filter the dev_requirements file if necessary.
def filter_dev_requirements(setup_py_path, released_packages, temp_dir, dependency_type):
The function will filter out any requirements present in the dev_requirements file that are present in the released_packages list (aka are required
by the package).
"""
# This method returns list of requirements from dev_requirements by filtering out packages in given list
dev_req_path = os.path.join(os.path.dirname(setup_py_path), DEV_REQ_FILE)
requirements = []
Expand All @@ -320,12 +321,13 @@ def filter_dev_requirements(setup_py_path, released_packages, temp_dir, dependen

# filter out any package available on PyPI (released_packages)
# include packages without relative reference and packages not available on PyPI
released_packages = [p.split("==")[0] for p in released_packages]
released_packages = [parse_require(p) for p in released_packages]
released_package_names = [p.key for p in released_packages]
# find prebuilt whl paths in dev requirements
prebuilt_dev_reqs = [os.path.basename(req.replace("\n", "")) for req in requirements if os.path.sep in req]
# filter any req if wheel is for a released package
req_to_exclude = [req for req in prebuilt_dev_reqs if req.split("-")[0].replace("_", "-") in released_packages]
req_to_exclude.extend(released_packages)
req_to_exclude = [req for req in prebuilt_dev_reqs if req.split("-")[0].replace("_", "-") in released_package_names]
req_to_exclude.extend(released_package_names)

filtered_req = [
req
Expand All @@ -334,9 +336,9 @@ def filter_dev_requirements(setup_py_path, released_packages, temp_dir, dependen
and not any([check_req_against_exclusion(req, i) for i in req_to_exclude])
]

if dependency_type == "Minimum":
# replace identity with the minimum version of the package
filtered_req = [replace_identity(req) for req in filtered_req]
if additional_filter_fn:
# this filter function handles the case where a dev requirement is incompatible with the current set of targeted packages
filtered_req = additional_filter_fn(setup_py_path, filtered_req, released_packages)

logging.info("Filtered dev requirements: %s", filtered_req)

Expand All @@ -345,7 +347,7 @@ def filter_dev_requirements(setup_py_path, released_packages, temp_dir, dependen
# create new dev requirements file with different name for filtered requirements
new_dev_req_path = os.path.join(temp_dir, NEW_DEV_REQ_FILE)
with open(new_dev_req_path, "w") as dev_req_file:
dev_req_file.writelines(filtered_req)
dev_req_file.writelines(line if line.endswith("\n") else line + "\n" for line in filtered_req)

return new_dev_req_path

Expand Down
2 changes: 1 addition & 1 deletion scripts/devops_tasks/common_tasks.py
Original file line number Diff line number Diff line change
Expand Up @@ -205,7 +205,7 @@ def is_required_version_on_pypi(package_name: str, spec: str) -> bool:
versions = [str(v) for v in versions if v in specifier]
except:
logging.error("Package {} is not found on PyPI".format(package_name))
return versions
return bool(versions)


def find_packages_missing_on_pypi(path: str) -> Iterable[str]:
Expand Down
3 changes: 1 addition & 2 deletions sdk/identity/azure-identity-broker/pyproject.toml
Original file line number Diff line number Diff line change
@@ -1,4 +1,3 @@
[tool.azure-sdk-build]
type_check_samples = false
pyright = false
mindependency = false
pyright = false
Loading

0 comments on commit 1349cd0

Please sign in to comment.