diff --git a/eng/tox/install_depend_packages.py b/eng/tox/install_depend_packages.py index b5455371b2b9..92363c8d72c3 100644 --- a/eng/tox/install_depend_packages.py +++ b/eng/tox/install_depend_packages.py @@ -13,10 +13,10 @@ from subprocess import check_call from typing import TYPE_CHECKING, Callable, Optional -from pkg_resources import parse_version, Requirement from pypi_tools.pypi import PyPIClient from packaging.specifiers import SpecifierSet from packaging.version import Version +from packaging.requirements import Requirement from ci_tools.parsing import ParsedSetup, parse_require from ci_tools.functions import compare_python_version, handle_incompatible_minimum_dev_reqs, get_pip_command @@ -186,7 +186,7 @@ def process_bounded_versions(originating_pkg_name: str, pkg_name: str, versions: # lower bound general if pkg_name in MINIMUM_VERSION_GENERIC_OVERRIDES: versions = [ - v for v in versions if parse_version(v) >= parse_version(MINIMUM_VERSION_GENERIC_OVERRIDES[pkg_name]) + v for v in versions if Version(v) >= Version(MINIMUM_VERSION_GENERIC_OVERRIDES[pkg_name]) ] # lower bound platform-specific @@ -195,7 +195,7 @@ def process_bounded_versions(originating_pkg_name: str, pkg_name: str, versions: restrictions = PLATFORM_SPECIFIC_MINIMUM_OVERRIDES[platform_bound] if pkg_name in restrictions: - versions = [v for v in versions if parse_version(v) >= parse_version(restrictions[pkg_name])] + versions = [v for v in versions if Version(v) >= Version(restrictions[pkg_name])] # lower bound package-specific if ( @@ -205,13 +205,13 @@ def process_bounded_versions(originating_pkg_name: str, pkg_name: str, versions: versions = [ v for v in versions - if parse_version(v) >= parse_version(MINIMUM_VERSION_SPECIFIC_OVERRIDES[originating_pkg_name][pkg_name]) + if Version(v) >= Version(MINIMUM_VERSION_SPECIFIC_OVERRIDES[originating_pkg_name][pkg_name]) ] # upper bound general if pkg_name in MAXIMUM_VERSION_GENERIC_OVERRIDES: versions = [ - v for v in versions if parse_version(v) <= parse_version(MAXIMUM_VERSION_GENERIC_OVERRIDES[pkg_name]) + v for v in versions if Version(v) <= Version(MAXIMUM_VERSION_GENERIC_OVERRIDES[pkg_name]) ] # upper bound platform @@ -220,7 +220,7 @@ def process_bounded_versions(originating_pkg_name: str, pkg_name: str, versions: restrictions = PLATFORM_SPECIFIC_MAXIMUM_OVERRIDES[platform_bound] if pkg_name in restrictions: - versions = [v for v in versions if parse_version(v) <= parse_version(restrictions[pkg_name])] + versions = [v for v in versions if Version(v) <= Version(restrictions[pkg_name])] # upper bound package-specific if ( @@ -230,7 +230,7 @@ def process_bounded_versions(originating_pkg_name: str, pkg_name: str, versions: versions = [ v for v in versions - if parse_version(v) <= parse_version(MAXIMUM_VERSION_SPECIFIC_OVERRIDES[originating_pkg_name][pkg_name]) + if Version(v) <= Version(MAXIMUM_VERSION_SPECIFIC_OVERRIDES[originating_pkg_name][pkg_name]) ] return versions @@ -241,7 +241,7 @@ def process_requirement(req, dependency_type, orig_pkg_name): # find package name and requirement specifier from requires requirement = parse_require(req) - pkg_name = requirement.key + pkg_name = requirement.name spec = requirement.specifier if len(requirement.specifier) else None # Filter out requirements with environment markers that don't match the current environment @@ -334,7 +334,7 @@ def filter_dev_requirements( # filter out any package available on PyPI (released_packages) # include packages without relative reference and packages not available on PyPI released_packages = [parse_require(p) for p in released_packages] - released_package_names = [p.key for p in released_packages] + released_package_names = [p.name for p in released_packages] # find prebuilt whl paths in dev requiremente prebuilt_dev_reqs = [os.path.basename(req.replace("\n", "")) for req in requirements if os.path.sep in req] # filter any req if wheel is for a released package diff --git a/eng/tox/prep_sphinx_env.py b/eng/tox/prep_sphinx_env.py index 1bab36a3b879..048497293ec3 100644 --- a/eng/tox/prep_sphinx_env.py +++ b/eng/tox/prep_sphinx_env.py @@ -12,11 +12,7 @@ import logging import shutil import argparse -from pkg_resources import Requirement -import ast import os -import textwrap -import io from tox_helper_tasks import ( unzip_sdist_to_directory, move_and_rename @@ -55,6 +51,7 @@ def should_build_docs(package_name): def create_index_file(readme_location, package_rst): readme_ext = os.path.splitext(readme_location)[1] + output = "" if readme_ext == ".md": with open(readme_location, "r") as file: output = file.read() diff --git a/eng/tox/run_sphinx_build.py b/eng/tox/run_sphinx_build.py index a4e389fd4198..de3e01b0d98c 100644 --- a/eng/tox/run_sphinx_build.py +++ b/eng/tox/run_sphinx_build.py @@ -12,14 +12,9 @@ import argparse import os import logging -import sys from prep_sphinx_env import should_build_docs from run_sphinx_apidoc import is_mgmt_package -from pkg_resources import Requirement -import ast import os -import textwrap -import io import shutil from ci_tools.parsing import ParsedSetup diff --git a/eng/tox/verify_installed_packages.py b/eng/tox/verify_installed_packages.py index b39d93504146..afe6ee6c48c8 100644 --- a/eng/tox/verify_installed_packages.py +++ b/eng/tox/verify_installed_packages.py @@ -38,11 +38,11 @@ def verify_packages(package_file_path): for p in get_installed_packages(): if "==" in p: [package, version] = p.split("==") - installed[package.upper()] = version + installed[package.upper().replace("_","-")] = version expected = {} for p in packages: [package, version] = p.split("==") - expected[package.upper()] = version + expected[package.upper().replace("_","-")] = version missing_packages = [pkg for pkg in expected.keys() if installed.get(pkg) != expected.get(pkg)] diff --git a/scripts/devops_tasks/test_regression.py b/scripts/devops_tasks/test_regression.py index 30498816113e..d9c2662ff55e 100644 --- a/scripts/devops_tasks/test_regression.py +++ b/scripts/devops_tasks/test_regression.py @@ -328,7 +328,7 @@ def find_package_dependency(glob_string, repo_root_dir, dependent_service): parsed = ParsedSetup.from_path(pkg_root) # Get a list of package names from install requires - required_pkgs = [parse_require(r).key for r in parsed.requires] + required_pkgs = [parse_require(r).name for r in parsed.requires] required_pkgs = [p for p in required_pkgs if p.startswith("azure")] for req_pkg in required_pkgs: diff --git a/scripts/devops_tasks/tox_harness.py b/scripts/devops_tasks/tox_harness.py index e7bf0b6e576b..9ffe33502591 100644 --- a/scripts/devops_tasks/tox_harness.py +++ b/scripts/devops_tasks/tox_harness.py @@ -21,7 +21,7 @@ from ci_tools.scenario.generation import replace_dev_reqs from ci_tools.functions import cleanup_directory from ci_tools.parsing import ParsedSetup -from pkg_resources import parse_requirements, RequirementParseError +from packaging.requirements import Requirement import logging logging.getLogger().setLevel(logging.INFO) @@ -58,7 +58,7 @@ def compare_req_to_injected_reqs(parsed_req, injected_packages): return any(parsed_req.name in req for req in injected_packages) - +# todo: verify this code def inject_custom_reqs(file, injected_packages, package_dir): req_lines = [] injected_packages = [p for p in re.split(r"[\s,]", injected_packages) if p] @@ -69,7 +69,7 @@ def inject_custom_reqs(file, injected_packages, package_dir): for line in f: logging.info("Attempting to parse {}".format(line)) try: - parsed_req = [req for req in parse_requirements(line)] + parsed_req = Requirement(line.strip()) except Exception as e: logging.error(e) parsed_req = [None] diff --git a/scripts/devops_tasks/update_regression_services.py b/scripts/devops_tasks/update_regression_services.py index a76d862fe3c1..fa7155a1440a 100644 --- a/scripts/devops_tasks/update_regression_services.py +++ b/scripts/devops_tasks/update_regression_services.py @@ -9,7 +9,6 @@ import pdb import json -import pkg_resources from test_regression import find_package_dependency, AZURE_GLOB_STRING from ci_tools.functions import discover_targeted_packages @@ -85,7 +84,7 @@ def parse_service(pkg_path): print("The json file {} cannot be loaded.".format(args.json)) exit(1) - if len(service_list) > 0: + if len(service_list) > 0: settings = json.loads(settings_json) settings["matrix"]["DependentService"] = list(service_list) json_result = json.dumps(settings) diff --git a/scripts/multiapi_init_gen.py b/scripts/multiapi_init_gen.py index dcb3e119ac4a..73eaffe6f722 100644 --- a/scripts/multiapi_init_gen.py +++ b/scripts/multiapi_init_gen.py @@ -40,9 +40,10 @@ ) import azure.common -import pkg_resources - -pkg_resources.declare_namespace("azure") +# all of the azure packages that are namespace packages have a __init__ that looks like: +# __path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore +# so we can use the namespace package without explicitly declaring a parent azure namespace. At least according +# to the docs. _LOGGER = logging.getLogger(__name__) diff --git a/sdk/core/azure-core/dev_requirements.txt b/sdk/core/azure-core/dev_requirements.txt index 3e21785796c3..59b6fb6bd6d1 100644 --- a/sdk/core/azure-core/dev_requirements.txt +++ b/sdk/core/azure-core/dev_requirements.txt @@ -12,3 +12,4 @@ azure-data-tables opentelemetry-sdk~=1.26 opentelemetry-instrumentation-requests>=0.50b0 ../../identity/azure-identity +packaging # for version parsing in test_basic_transport_async.py \ No newline at end of file diff --git a/sdk/core/azure-core/tests/async_tests/test_basic_transport_async.py b/sdk/core/azure-core/tests/async_tests/test_basic_transport_async.py index 691996e7e71b..f0e11b1d213c 100644 --- a/sdk/core/azure-core/tests/async_tests/test_basic_transport_async.py +++ b/sdk/core/azure-core/tests/async_tests/test_basic_transport_async.py @@ -26,7 +26,7 @@ import sys import asyncio from unittest.mock import Mock -from pkg_resources import parse_version +from packaging.version import Version import aiohttp @@ -1050,7 +1050,7 @@ async def test_close_too_soon_works_fine(caplog, port, http_request): @pytest.mark.skipif( - parse_version(aiohttp.__version__) >= parse_version("3.10"), + Version(aiohttp.__version__) >= Version("3.10"), reason="aiohttp 3.10 introduced separate connection timeout", ) @pytest.mark.parametrize("http_request", HTTP_REQUESTS) @@ -1073,7 +1073,7 @@ async def test_aiohttp_timeout_response(http_request): @pytest.mark.skipif( - parse_version(aiohttp.__version__) < parse_version("3.10"), + Version(aiohttp.__version__) < Version("3.10"), reason="aiohttp 3.10 introduced separate connection timeout", ) @pytest.mark.parametrize("http_request", HTTP_REQUESTS) diff --git a/shared_requirements.txt b/shared_requirements.txt index 3868d47ae0d2..ec36e25b7a8f 100644 --- a/shared_requirements.txt +++ b/shared_requirements.txt @@ -80,4 +80,4 @@ azure-monitor-opentelemetry python-dotenv pyrit prompty -jinja2 \ No newline at end of file +Jinja2 \ No newline at end of file diff --git a/tools/azure-sdk-tools/ci_tools/build.py b/tools/azure-sdk-tools/ci_tools/build.py index e4e17a5ad491..f564e95c723a 100644 --- a/tools/azure-sdk-tools/ci_tools/build.py +++ b/tools/azure-sdk-tools/ci_tools/build.py @@ -246,7 +246,7 @@ def create_package( # given the additional requirements of the package, we should install them in the current environment before attempting to build the package # we assume the presence of `wheel`, `build`, `setuptools>=61.0.0` pip_output = get_pip_list_output(sys.executable) - necessary_install_requirements = [req for req in setup_parsed.requires if parse_require(req).key not in pip_output.keys()] + necessary_install_requirements = [req for req in setup_parsed.requires if parse_require(req).name not in pip_output.keys()] run([sys.executable, "-m", "pip", "install", *necessary_install_requirements], cwd=setup_parsed.folder) run([sys.executable, "-m", "build", f"-n{'s' if enable_sdist else ''}{'w' if enable_wheel else ''}", "-o", dist], cwd=setup_parsed.folder, check=True) else: diff --git a/tools/azure-sdk-tools/ci_tools/dependency_analysis.py b/tools/azure-sdk-tools/ci_tools/dependency_analysis.py index 8f268bdf5e70..59a5d15a9b45 100755 --- a/tools/azure-sdk-tools/ci_tools/dependency_analysis.py +++ b/tools/azure-sdk-tools/ci_tools/dependency_analysis.py @@ -1,6 +1,5 @@ #!/usr/bin/env python import argparse -import ast from datetime import datetime import glob import io @@ -8,7 +7,6 @@ import os import re import sys -import textwrap from typing import List, Set, Dict, Tuple, Any try: @@ -16,8 +14,9 @@ except: from collections.abc import Sized -from pkg_resources import Requirement -from packaging.specifiers import SpecifierSet, Version +from packaging.requirements import Requirement +from packaging.specifiers import SpecifierSet +from packaging.version import Version from ci_tools.variables import discover_repo_root from ci_tools.functions import discover_targeted_packages from ci_tools.parsing import ParsedSetup, parse_require @@ -30,7 +29,7 @@ pass # we only technically require this when outputting the rendered report -def get_known_versions(package_name: str) -> List[str]: +def get_known_versions(package_name: str) -> List[Version]: client = PyPIClient() return client.get_ordered_versions(package_name) @@ -60,8 +59,8 @@ def get_lib_deps(base_dir: str) -> Tuple[Dict[str, Dict[str, Any]], Dict[str, Di packages = {} dependencies = {} for lib_dir in discover_targeted_packages("azure*", base_dir): + setup_path = os.path.join(lib_dir, "setup.py") try: - setup_path = os.path.join(lib_dir, "setup.py") parsed = ParsedSetup.from_path(setup_path) lib_name, version, requires = parsed.name, parsed.version, parsed.requires @@ -69,7 +68,7 @@ def get_lib_deps(base_dir: str) -> Tuple[Dict[str, Dict[str, Any]], Dict[str, Di for req in requires: req_obj = parse_require(req) - req_name = req_obj.key + req_name = req_obj.name spec = req_obj.specifier if len(req_obj.specifier) else None if spec is None: spec = "" @@ -101,7 +100,7 @@ def get_wheel_deps(wheel_dir: str) -> Tuple[Dict[str, Dict[str, Any]], Dict[str, req = re.sub(r"[\s\(\)]", "", req) # Version specifiers appear in parentheses req_obj = parse_require(req) - req_name = req_obj.key + req_name = req_obj.name spec = req_obj.specifier if len(req_obj.specifier) else None if spec is None: spec = "" @@ -152,8 +151,8 @@ def dump_packages(data_pkgs: Dict[str, Dict[str, Any]]) -> Dict[str, Dict[str, A def resolve_lib_deps(dump_data: Dict[str, Dict[str, Any]], data_pkgs: Dict[str, Dict[str, Any]], pkg_id: str) -> None: for dep in dump_data[pkg_id]["deps"]: - dep_req = Requirement.parse(dep["name"] + dep["version"]) - if dep["name"] in data_pkgs and data_pkgs[dep["name"]]["version"] in dep_req: + dep_req = Requirement(dep["name"] + dep["version"]) + if dep["name"] in data_pkgs and data_pkgs[dep["name"]]["version"] in dep_req.specifier: # If the internal package version matches the dependency spec, # rewrite the dep version to match the internal package version dep["version"] = data_pkgs[dep["name"]]["version"] diff --git a/tools/azure-sdk-tools/ci_tools/functions.py b/tools/azure-sdk-tools/ci_tools/functions.py index 7ef1fe00bc4b..e07c5b94cb0a 100644 --- a/tools/azure-sdk-tools/ci_tools/functions.py +++ b/tools/azure-sdk-tools/ci_tools/functions.py @@ -7,7 +7,7 @@ from ast import Not from packaging.specifiers import SpecifierSet from packaging.version import Version, parse, InvalidVersion -from pkg_resources import Requirement +from packaging.requirements import Requirement import io from ci_tools.variables import discover_repo_root, DEV_BUILD_IDENTIFIER, str_to_bool @@ -369,12 +369,12 @@ def process_requires(setup_py_path: str, is_dev_build: bool = False): """ pkg_details = ParsedSetup.from_path(setup_py_path) - azure_requirements = [Requirement.parse(r) for r in pkg_details.requires if r.startswith("azure")] + azure_requirements = [Requirement(r) for r in pkg_details.requires if r.startswith("azure")] # Find package requirements that are not available on PyPI requirement_to_update = {} for req in azure_requirements: - pkg_name = req.key + pkg_name = req.name spec = SpecifierSet(str(req).replace(pkg_name, "")) if not is_required_version_on_pypi(pkg_name, spec) or is_dev_build: @@ -673,7 +673,7 @@ def is_package_compatible( for immutable_requirement in immutable_requirements: for package_requirement in package_requirements: - if package_requirement.key == immutable_requirement.key: + if package_requirement.name == immutable_requirement.name: # if the dev_req line has a requirement that conflicts with the immutable requirement, # we need to resolve it. We KNOW that the immutable requirement will be of form package==version, # so we can reliably pull out the version and check it against the specifier of the dev_req line. @@ -743,7 +743,7 @@ def resolve_compatible_package(package_name: str, immutable_requirements: List[R """ pypi = PyPIClient() - immovable_pkgs = {req.key: req for req in immutable_requirements} + immovable_pkgs = {req.name: req for req in immutable_requirements} # Let's use a real use-case to walk through this function. We're going to use the azure-ai-language-conversations # package as an example. @@ -935,7 +935,7 @@ def get_pip_command(python_exe: Optional[str] = None) -> List[str]: :param str python_exe: The Python executable to use (if not using the default). :return: List of command arguments for pip. :rtype: List[str] - + """ # Check TOX_PIP_IMPL environment variable (aligns with tox.ini configuration) pip_impl = os.environ.get('TOX_PIP_IMPL', 'pip').lower() diff --git a/tools/azure-sdk-tools/ci_tools/parsing/parse_functions.py b/tools/azure-sdk-tools/ci_tools/parsing/parse_functions.py index 1635355dbc02..ff2f23e22657 100644 --- a/tools/azure-sdk-tools/ci_tools/parsing/parse_functions.py +++ b/tools/azure-sdk-tools/ci_tools/parsing/parse_functions.py @@ -14,12 +14,8 @@ from typing import Dict, List, Tuple, Any, Optional -# Assumes the presence of setuptools -from pkg_resources import parse_requirements, Requirement - # this assumes the presence of "packaging" -from packaging.specifiers import SpecifierSet -import setuptools +from packaging.requirements import Requirement from setuptools import Extension from ci_tools.variables import str_to_bool @@ -313,7 +309,7 @@ def setup(*args, **kwargs): classifiers = kwargs.get("classifiers", []) keywords = kwargs.get("keywords", []) - is_new_sdk = name in NEW_REQ_PACKAGES or any(map(lambda x: (parse_require(x).key in NEW_REQ_PACKAGES), requires)) + is_new_sdk = name in NEW_REQ_PACKAGES or any(map(lambda x: (parse_require(x).name in NEW_REQ_PACKAGES), requires)) ext_package = kwargs.get("ext_package", None) ext_modules = kwargs.get("ext_modules", []) @@ -367,6 +363,8 @@ def parse_pyproject( project_config = toml_dict.get("project", None) + assert project_config is not None, f"Unable to find [project] section in {pyproject_filename}. Please ensure it is present." + # to pull a version from pyproject.toml, we need to get a dynamic version out. We can ask # setuptools to give us the metadata for a package, but that will involve _partially building_ the package # to create an egginfo folder. This is a very expensive operation goes against the entire point of @@ -393,7 +391,7 @@ def parse_pyproject( version = parsed_version python_requires = project_config.get("requires-python") requires = project_config.get("dependencies") - is_new_sdk = name in NEW_REQ_PACKAGES or any(map(lambda x: (parse_require(x).key in NEW_REQ_PACKAGES), requires)) + is_new_sdk = name in NEW_REQ_PACKAGES or any(map(lambda x: (parse_require(x).name in NEW_REQ_PACKAGES), requires)) name_space = name.replace("-", ".") package_data = get_value_from_dict(toml_dict, "tool.setuptools.package-data", None) @@ -569,11 +567,11 @@ def get_install_requires(setup_path: str) -> List[str]: def parse_require(req: str) -> Requirement: """ - Parses the incoming version specification and returns a tuple of the requirement name and specifier. + Parses a PEP 508 requirement string into a Requirement object. - "azure-core<2.0.0,>=1.11.0" -> [azure-core, <2.0.0,>=1.11.0] + Example: "azure-core<2.0.0,>=1.11.0" """ - return Requirement.parse(req) + return Requirement(req) def get_name_from_specifier(version: str) -> str: diff --git a/tools/azure-sdk-tools/ci_tools/scenario/generation.py b/tools/azure-sdk-tools/ci_tools/scenario/generation.py index f6d9b05a5da7..7eb473e58d61 100644 --- a/tools/azure-sdk-tools/ci_tools/scenario/generation.py +++ b/tools/azure-sdk-tools/ci_tools/scenario/generation.py @@ -131,7 +131,7 @@ def create_package_and_install( # parse the specifier requirement = parse_require(req) - req_name = requirement.key + req_name = requirement.name req_specifier = requirement.specifier if len(requirement.specifier) else None # if we have the package already present... @@ -160,7 +160,7 @@ def create_package_and_install( ) except subprocess.CalledProcessError as e: requirement = parse_require(addition) - non_present_reqs.append(requirement.key) + non_present_reqs.append(requirement.name) additional_downloaded_reqs = [ os.path.abspath(os.path.join(tmp_dl_folder, pth)) for pth in os.listdir(tmp_dl_folder) diff --git a/tools/azure-sdk-tools/tests/test_conflict_resolution.py b/tools/azure-sdk-tools/tests/test_conflict_resolution.py index 6baafe345d66..a758aebabe0b 100644 --- a/tools/azure-sdk-tools/tests/test_conflict_resolution.py +++ b/tools/azure-sdk-tools/tests/test_conflict_resolution.py @@ -5,7 +5,7 @@ from ci_tools.functions import resolve_compatible_package, is_package_compatible from typing import Optional, List from packaging.version import Version -from pkg_resources import Requirement +from packaging.requirements import Requirement @pytest.mark.parametrize( diff --git a/tools/azure-sdk-tools/tests/test_parse_functionality.py b/tools/azure-sdk-tools/tests/test_parse_functionality.py index 35756f9a971f..49201b248930 100644 --- a/tools/azure-sdk-tools/tests/test_parse_functionality.py +++ b/tools/azure-sdk-tools/tests/test_parse_functionality.py @@ -17,7 +17,7 @@ def test_parse_require(): test_scenarios = [ - ("ConfigArgParse>=0.12.0", "configargparse", ">=0.12.0"), + ("ConfigArgParse>=0.12.0", "ConfigArgParse", ">=0.12.0"), ("msrest>=0.6.10", "msrest", ">=0.6.10"), ("azure-core<2.0.0,>=1.2.2", "azure-core", "<2.0.0,>=1.2.2"), ("msrest==0.6.10", "msrest", "==0.6.10"), @@ -31,10 +31,10 @@ def test_parse_require(): for scenario in test_scenarios: result = parse_require(scenario[0]) - assert result.key is not None + assert result.name is not None if scenario[2] is not None: assert len(result.specifier) != 0 - assert result.key == scenario[1] + assert result.name == scenario[1] assert str(result.specifier) == (scenario[2] or "") @@ -44,7 +44,7 @@ def test_parse_require_with_no_spec(): for scenario in spec_scenarios: result = parse_require(scenario) - assert result.key == scenario.replace("_", "-") + assert result.name == scenario assert len(result.specifier) == 0