Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
18 changes: 9 additions & 9 deletions eng/tox/install_depend_packages.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,10 +13,10 @@

from subprocess import check_call
from typing import TYPE_CHECKING, Callable, Optional
from pkg_resources import parse_version, Requirement
from pypi_tools.pypi import PyPIClient
from packaging.specifiers import SpecifierSet
from packaging.version import Version
from packaging.requirements import Requirement

from ci_tools.parsing import ParsedSetup, parse_require
from ci_tools.functions import compare_python_version, handle_incompatible_minimum_dev_reqs, get_pip_command
Expand Down Expand Up @@ -186,7 +186,7 @@ def process_bounded_versions(originating_pkg_name: str, pkg_name: str, versions:
# lower bound general
if pkg_name in MINIMUM_VERSION_GENERIC_OVERRIDES:
versions = [
v for v in versions if parse_version(v) >= parse_version(MINIMUM_VERSION_GENERIC_OVERRIDES[pkg_name])
v for v in versions if Version(v) >= Version(MINIMUM_VERSION_GENERIC_OVERRIDES[pkg_name])
]

# lower bound platform-specific
Expand All @@ -195,7 +195,7 @@ def process_bounded_versions(originating_pkg_name: str, pkg_name: str, versions:
restrictions = PLATFORM_SPECIFIC_MINIMUM_OVERRIDES[platform_bound]

if pkg_name in restrictions:
versions = [v for v in versions if parse_version(v) >= parse_version(restrictions[pkg_name])]
versions = [v for v in versions if Version(v) >= Version(restrictions[pkg_name])]

# lower bound package-specific
if (
Expand All @@ -205,13 +205,13 @@ def process_bounded_versions(originating_pkg_name: str, pkg_name: str, versions:
versions = [
v
for v in versions
if parse_version(v) >= parse_version(MINIMUM_VERSION_SPECIFIC_OVERRIDES[originating_pkg_name][pkg_name])
if Version(v) >= Version(MINIMUM_VERSION_SPECIFIC_OVERRIDES[originating_pkg_name][pkg_name])
]

# upper bound general
if pkg_name in MAXIMUM_VERSION_GENERIC_OVERRIDES:
versions = [
v for v in versions if parse_version(v) <= parse_version(MAXIMUM_VERSION_GENERIC_OVERRIDES[pkg_name])
v for v in versions if Version(v) <= Version(MAXIMUM_VERSION_GENERIC_OVERRIDES[pkg_name])
]

# upper bound platform
Expand All @@ -220,7 +220,7 @@ def process_bounded_versions(originating_pkg_name: str, pkg_name: str, versions:
restrictions = PLATFORM_SPECIFIC_MAXIMUM_OVERRIDES[platform_bound]

if pkg_name in restrictions:
versions = [v for v in versions if parse_version(v) <= parse_version(restrictions[pkg_name])]
versions = [v for v in versions if Version(v) <= Version(restrictions[pkg_name])]

# upper bound package-specific
if (
Expand All @@ -230,7 +230,7 @@ def process_bounded_versions(originating_pkg_name: str, pkg_name: str, versions:
versions = [
v
for v in versions
if parse_version(v) <= parse_version(MAXIMUM_VERSION_SPECIFIC_OVERRIDES[originating_pkg_name][pkg_name])
if Version(v) <= Version(MAXIMUM_VERSION_SPECIFIC_OVERRIDES[originating_pkg_name][pkg_name])
]

return versions
Expand All @@ -241,7 +241,7 @@ def process_requirement(req, dependency_type, orig_pkg_name):

# find package name and requirement specifier from requires
requirement = parse_require(req)
pkg_name = requirement.key
pkg_name = requirement.name
spec = requirement.specifier if len(requirement.specifier) else None

# Filter out requirements with environment markers that don't match the current environment
Expand Down Expand Up @@ -334,7 +334,7 @@ def filter_dev_requirements(
# filter out any package available on PyPI (released_packages)
# include packages without relative reference and packages not available on PyPI
released_packages = [parse_require(p) for p in released_packages]
released_package_names = [p.key for p in released_packages]
released_package_names = [p.name for p in released_packages]
# find prebuilt whl paths in dev requirements
prebuilt_dev_reqs = [os.path.basename(req.replace("\n", "")) for req in requirements if os.path.sep in req]
# filter any req if wheel is for a released package
Expand Down
5 changes: 1 addition & 4 deletions eng/tox/prep_sphinx_env.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,11 +12,7 @@
import logging
import shutil
import argparse
from pkg_resources import Requirement
import ast
import os
import textwrap
import io
from tox_helper_tasks import (
unzip_sdist_to_directory,
move_and_rename
Expand Down Expand Up @@ -55,6 +51,7 @@ def should_build_docs(package_name):
def create_index_file(readme_location, package_rst):
readme_ext = os.path.splitext(readme_location)[1]

output = ""
if readme_ext == ".md":
with open(readme_location, "r") as file:
output = file.read()
Expand Down
5 changes: 0 additions & 5 deletions eng/tox/run_sphinx_build.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,14 +12,9 @@
import argparse
import os
import logging
import sys
from prep_sphinx_env import should_build_docs
from run_sphinx_apidoc import is_mgmt_package
from pkg_resources import Requirement
import ast
import os
import textwrap
import io
import shutil

from ci_tools.parsing import ParsedSetup
Expand Down
4 changes: 2 additions & 2 deletions eng/tox/verify_installed_packages.py
Original file line number Diff line number Diff line change
Expand Up @@ -38,11 +38,11 @@ def verify_packages(package_file_path):
for p in get_installed_packages():
if "==" in p:
[package, version] = p.split("==")
installed[package.upper()] = version
installed[package.upper().replace("_","-")] = version
expected = {}
for p in packages:
[package, version] = p.split("==")
expected[package.upper()] = version
expected[package.upper().replace("_","-")] = version

missing_packages = [pkg for pkg in expected.keys() if installed.get(pkg) != expected.get(pkg)]

Expand Down
2 changes: 1 addition & 1 deletion scripts/devops_tasks/test_regression.py
Original file line number Diff line number Diff line change
Expand Up @@ -328,7 +328,7 @@ def find_package_dependency(glob_string, repo_root_dir, dependent_service):
parsed = ParsedSetup.from_path(pkg_root)

# Get a list of package names from install requires
required_pkgs = [parse_require(r).key for r in parsed.requires]
required_pkgs = [parse_require(r).name for r in parsed.requires]
required_pkgs = [p for p in required_pkgs if p.startswith("azure")]

for req_pkg in required_pkgs:
Expand Down
6 changes: 3 additions & 3 deletions scripts/devops_tasks/tox_harness.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@
from ci_tools.scenario.generation import replace_dev_reqs
from ci_tools.functions import cleanup_directory
from ci_tools.parsing import ParsedSetup
from pkg_resources import parse_requirements, RequirementParseError
from packaging.requirements import Requirement
import logging

logging.getLogger().setLevel(logging.INFO)
Expand Down Expand Up @@ -58,7 +58,7 @@ def compare_req_to_injected_reqs(parsed_req, injected_packages):

return any(parsed_req.name in req for req in injected_packages)


# todo: verify this code
def inject_custom_reqs(file, injected_packages, package_dir):
req_lines = []
injected_packages = [p for p in re.split(r"[\s,]", injected_packages) if p]
Expand All @@ -69,7 +69,7 @@ def inject_custom_reqs(file, injected_packages, package_dir):
for line in f:
logging.info("Attempting to parse {}".format(line))
try:
parsed_req = [req for req in parse_requirements(line)]
parsed_req = Requirement(line.strip())
except Exception as e:
logging.error(e)
parsed_req = [None]
Expand Down
3 changes: 1 addition & 2 deletions scripts/devops_tasks/update_regression_services.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,6 @@
import pdb
import json

import pkg_resources
from test_regression import find_package_dependency, AZURE_GLOB_STRING

from ci_tools.functions import discover_targeted_packages
Expand Down Expand Up @@ -85,7 +84,7 @@ def parse_service(pkg_path):
print("The json file {} cannot be loaded.".format(args.json))
exit(1)

if len(service_list) > 0:
if len(service_list) > 0:
settings = json.loads(settings_json)
settings["matrix"]["DependentService"] = list(service_list)
json_result = json.dumps(settings)
Expand Down
7 changes: 4 additions & 3 deletions scripts/multiapi_init_gen.py
Original file line number Diff line number Diff line change
Expand Up @@ -40,9 +40,10 @@
)
import azure.common

import pkg_resources

pkg_resources.declare_namespace("azure")
# all of the azure packages that are namespace packages have a __init__ that looks like:
# __path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore
# so, per the pkgutil documentation, we can use the namespace package without explicitly
# declaring a parent azure namespace.

_LOGGER = logging.getLogger(__name__)

Expand Down
1 change: 1 addition & 0 deletions sdk/core/azure-core/dev_requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -12,3 +12,4 @@ azure-data-tables
opentelemetry-sdk~=1.26
opentelemetry-instrumentation-requests>=0.50b0
../../identity/azure-identity
packaging # for version parsing in test_basic_transport_async.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@
import sys
import asyncio
from unittest.mock import Mock
from pkg_resources import parse_version
from packaging.version import Version
import aiohttp


Expand Down Expand Up @@ -1050,7 +1050,7 @@ async def test_close_too_soon_works_fine(caplog, port, http_request):


@pytest.mark.skipif(
parse_version(aiohttp.__version__) >= parse_version("3.10"),
Version(aiohttp.__version__) >= Version("3.10"),
reason="aiohttp 3.10 introduced separate connection timeout",
)
@pytest.mark.parametrize("http_request", HTTP_REQUESTS)
Expand All @@ -1073,7 +1073,7 @@ async def test_aiohttp_timeout_response(http_request):


@pytest.mark.skipif(
parse_version(aiohttp.__version__) < parse_version("3.10"),
Version(aiohttp.__version__) < Version("3.10"),
reason="aiohttp 3.10 introduced separate connection timeout",
)
@pytest.mark.parametrize("http_request", HTTP_REQUESTS)
Expand Down
2 changes: 1 addition & 1 deletion shared_requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -80,4 +80,4 @@ azure-monitor-opentelemetry
python-dotenv
pyrit
prompty
jinja2
Jinja2
2 changes: 1 addition & 1 deletion tools/azure-sdk-tools/ci_tools/build.py
Original file line number Diff line number Diff line change
Expand Up @@ -246,7 +246,7 @@ def create_package(
# given the additional requirements of the package, we should install them in the current environment before attempting to build the package
# we assume the presence of `wheel`, `build`, `setuptools>=61.0.0`
pip_output = get_pip_list_output(sys.executable)
necessary_install_requirements = [req for req in setup_parsed.requires if parse_require(req).key not in pip_output.keys()]
necessary_install_requirements = [req for req in setup_parsed.requires if parse_require(req).name not in pip_output.keys()]
run([sys.executable, "-m", "pip", "install", *necessary_install_requirements], cwd=setup_parsed.folder)
run([sys.executable, "-m", "build", f"-n{'s' if enable_sdist else ''}{'w' if enable_wheel else ''}", "-o", dist], cwd=setup_parsed.folder, check=True)
else:
Expand Down
19 changes: 9 additions & 10 deletions tools/azure-sdk-tools/ci_tools/dependency_analysis.py
Original file line number Diff line number Diff line change
@@ -1,23 +1,22 @@
#!/usr/bin/env python
import argparse
import ast
from datetime import datetime
import glob
import io
import json
import os
import re
import sys
import textwrap
from typing import List, Set, Dict, Tuple, Any

try:
from collections import Sized
except:
from collections.abc import Sized

from pkg_resources import Requirement
from packaging.specifiers import SpecifierSet, Version
from packaging.requirements import Requirement
from packaging.specifiers import SpecifierSet
from packaging.version import Version
from ci_tools.variables import discover_repo_root
from ci_tools.functions import discover_targeted_packages
from ci_tools.parsing import ParsedSetup, parse_require
Expand All @@ -30,7 +29,7 @@
pass # we only technically require this when outputting the rendered report


def get_known_versions(package_name: str) -> List[str]:
def get_known_versions(package_name: str) -> List[Version]:
client = PyPIClient()
return client.get_ordered_versions(package_name)

Expand Down Expand Up @@ -60,16 +59,16 @@ def get_lib_deps(base_dir: str) -> Tuple[Dict[str, Dict[str, Any]], Dict[str, Di
packages = {}
dependencies = {}
for lib_dir in discover_targeted_packages("azure*", base_dir):
setup_path = os.path.join(lib_dir, "setup.py")
try:
setup_path = os.path.join(lib_dir, "setup.py")
parsed = ParsedSetup.from_path(setup_path)
lib_name, version, requires = parsed.name, parsed.version, parsed.requires

packages[lib_name] = {"version": version, "source": lib_dir, "deps": []}

for req in requires:
req_obj = parse_require(req)
req_name = req_obj.key
req_name = req_obj.name
spec = req_obj.specifier if len(req_obj.specifier) else None
if spec is None:
spec = ""
Expand Down Expand Up @@ -101,7 +100,7 @@ def get_wheel_deps(wheel_dir: str) -> Tuple[Dict[str, Dict[str, Any]], Dict[str,
req = re.sub(r"[\s\(\)]", "", req) # Version specifiers appear in parentheses
req_obj = parse_require(req)

req_name = req_obj.key
req_name = req_obj.name
spec = req_obj.specifier if len(req_obj.specifier) else None
if spec is None:
spec = ""
Expand Down Expand Up @@ -152,8 +151,8 @@ def dump_packages(data_pkgs: Dict[str, Dict[str, Any]]) -> Dict[str, Dict[str, A

def resolve_lib_deps(dump_data: Dict[str, Dict[str, Any]], data_pkgs: Dict[str, Dict[str, Any]], pkg_id: str) -> None:
for dep in dump_data[pkg_id]["deps"]:
dep_req = Requirement.parse(dep["name"] + dep["version"])
if dep["name"] in data_pkgs and data_pkgs[dep["name"]]["version"] in dep_req:
dep_req = Requirement(dep["name"] + dep["version"])
if dep["name"] in data_pkgs and data_pkgs[dep["name"]]["version"] in dep_req.specifier:
# If the internal package version matches the dependency spec,
# rewrite the dep version to match the internal package version
dep["version"] = data_pkgs[dep["name"]]["version"]
Expand Down
12 changes: 6 additions & 6 deletions tools/azure-sdk-tools/ci_tools/functions.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@
from ast import Not
from packaging.specifiers import SpecifierSet
from packaging.version import Version, parse, InvalidVersion
from pkg_resources import Requirement
from packaging.requirements import Requirement
import io

from ci_tools.variables import discover_repo_root, DEV_BUILD_IDENTIFIER, str_to_bool
Expand Down Expand Up @@ -369,12 +369,12 @@ def process_requires(setup_py_path: str, is_dev_build: bool = False):
"""

pkg_details = ParsedSetup.from_path(setup_py_path)
azure_requirements = [Requirement.parse(r) for r in pkg_details.requires if r.startswith("azure")]
azure_requirements = [Requirement(r) for r in pkg_details.requires if r.startswith("azure")]

# Find package requirements that are not available on PyPI
requirement_to_update = {}
for req in azure_requirements:
pkg_name = req.key
pkg_name = req.name
spec = SpecifierSet(str(req).replace(pkg_name, ""))

if not is_required_version_on_pypi(pkg_name, spec) or is_dev_build:
Expand Down Expand Up @@ -673,7 +673,7 @@ def is_package_compatible(

for immutable_requirement in immutable_requirements:
for package_requirement in package_requirements:
if package_requirement.key == immutable_requirement.key:
if package_requirement.name == immutable_requirement.name:
# if the dev_req line has a requirement that conflicts with the immutable requirement,
# we need to resolve it. We KNOW that the immutable requirement will be of form package==version,
# so we can reliably pull out the version and check it against the specifier of the dev_req line.
Expand Down Expand Up @@ -743,7 +743,7 @@ def resolve_compatible_package(package_name: str, immutable_requirements: List[R
"""

pypi = PyPIClient()
immovable_pkgs = {req.key: req for req in immutable_requirements}
immovable_pkgs = {req.name: req for req in immutable_requirements}

# Let's use a real use-case to walk through this function. We're going to use the azure-ai-language-conversations
# package as an example.
Expand Down Expand Up @@ -935,7 +935,7 @@ def get_pip_command(python_exe: Optional[str] = None) -> List[str]:
:param str python_exe: The Python executable to use (if not using the default).
:return: List of command arguments for pip.
:rtype: List[str]

"""
# Check TOX_PIP_IMPL environment variable (aligns with tox.ini configuration)
pip_impl = os.environ.get('TOX_PIP_IMPL', 'pip').lower()
Expand Down
Loading