From 4ce58161b8cdfe1f0a78bea141f4df4b89aac898 Mon Sep 17 00:00:00 2001 From: Bas Nijholt Date: Fri, 4 Oct 2024 13:42:50 -0700 Subject: [PATCH 01/39] Add pixi support --- tests/test_pixi.py | 71 ++++++++++++++++++++++++++++++++++++++++++++++ unidep/_cli.py | 23 +++++++++++++-- unidep/_pixi.py | 63 ++++++++++++++++++++++++++++++++++++++++ 3 files changed, 155 insertions(+), 2 deletions(-) create mode 100644 tests/test_pixi.py create mode 100644 unidep/_pixi.py diff --git a/tests/test_pixi.py b/tests/test_pixi.py new file mode 100644 index 00000000..4b205f6d --- /dev/null +++ b/tests/test_pixi.py @@ -0,0 +1,71 @@ +"""unidep tests.""" + +from __future__ import annotations + +import textwrap +from pathlib import Path +from typing import TYPE_CHECKING + +import pytest + +from unidep import ( + parse_requirements, + resolve_conflicts, +) +from unidep._dependencies_parsing import yaml_to_toml +from unidep._pixi import generate_pixi_toml + +if TYPE_CHECKING: + import sys + + if sys.version_info >= (3, 8): + from typing import Literal + else: # pragma: no cover + from typing_extensions import Literal + + +def maybe_as_toml(toml_or_yaml: Literal["toml", "yaml"], p: Path) -> Path: + if toml_or_yaml == "toml": + toml = yaml_to_toml(p) + p.unlink() + p = p.with_name("pyproject.toml") + p.write_text(toml) + return p + + +@pytest.mark.parametrize("toml_or_yaml", ["toml", "yaml"]) +def test_filter_python_dependencies_with_platforms( + toml_or_yaml: Literal["toml", "yaml"], + tmp_path: Path, +) -> None: + p = tmp_path / "requirements.yaml" + p.write_text( + textwrap.dedent( + """\ + channels: + - conda-forge + dependencies: + - foo # [unix] + """, + ), + ) + p = maybe_as_toml(toml_or_yaml, p) + requirements = parse_requirements(p, verbose=False) + resolved = resolve_conflicts(requirements.requirements, ["linux-64"]) + output_file = tmp_path / "pixi.toml" + generate_pixi_toml( + resolved, + requirements, + output_file=output_file, + verbose=False, + ) + assert output_file.read_text() == textwrap.dedent( + """\ + [project] + platforms = ["linux-64"] + channels = ["conda-forge"] + + [dependencies] + foo = "*" + """, + ) diff --git a/unidep/_cli.py b/unidep/_cli.py index 43612ac7..3ac8ab63 100755 --- a/unidep/_cli.py +++ b/unidep/_cli.py @@ -29,6 +29,7 @@ parse_local_dependencies, parse_requirements, ) +from unidep._pixi import generate_pixi_toml from unidep._setuptools_integration import ( filter_python_dependencies, get_python_dependencies, @@ -64,7 +65,7 @@ def _get_help_string(self, action: argparse.Action) -> str | None: from argparse import HelpFormatter as _HelpFormatter # type: ignore[assignment] _DEP_FILES = "`requirements.yaml` or `pyproject.toml`" -CondaExecutable = Literal["conda", "mamba", "micromamba"] +CondaExecutable = Literal["conda", "mamba", "micromamba", "pixi"] def _add_common_args( # noqa: PLR0912, C901 @@ -916,7 +917,25 @@ def _install_command( # noqa: PLR0912, PLR0915 skip_pip = True skip_conda = True - if env_spec.conda and not skip_conda: + if skip_conda: + pass + elif conda_executable == "pixi": + print("๐Ÿ”ฎ Installing conda dependencies with `pixi`") + generate_pixi_toml( + resolved, + platforms, + channels=requirements.channels, + output_file="pixi.toml", + verbose=verbose, + ) + # Install dependencies using pixi + if not dry_run: + subprocess.run(["pixi", "install"], check=True) + # Optionally, handle local packages + if not skip_local: + _install_local_packages_with_pixi(...) 
+ return # Exit after handling pixi + elif env_spec.conda: assert conda_executable is not None channel_args = ["--override-channels"] if env_spec.channels else [] for channel in env_spec.channels: diff --git a/unidep/_pixi.py b/unidep/_pixi.py new file mode 100644 index 00000000..ad3e3202 --- /dev/null +++ b/unidep/_pixi.py @@ -0,0 +1,63 @@ +from __future__ import annotations + +import sys +from pathlib import Path +from typing import TYPE_CHECKING, Any + +if TYPE_CHECKING: + from unidep._dependencies_parsing import ParsedRequirements + from unidep.platform_definitions import CondaPip, Platform, Spec + +try: # pragma: no cover + if sys.version_info >= (3, 11): + import tomllib + else: + import tomli as tomllib + HAS_TOML = True +except ImportError: # pragma: no cover + HAS_TOML = False + + +def generate_pixi_toml( + resolved_dependencies: dict[str, dict[Platform | None, dict[CondaPip, Spec]]], + requirements: ParsedRequirements, + output_file: str = "pixi.toml", + *, + verbose: bool = False, +) -> None: + pixi_data = {} + + pixi_data["project"] = { + "platforms": requirements.platforms, + "channels": requirements.channels, + } + + # Include extra configurations from pyproject.toml + pixi_data.update(_parse_pixi_sections_from_pyproject()) + + # Map unidep dependencies to pixi.toml sections + pixi_data.setdefault("dependencies", {}) + pixi_data.setdefault("pypi-dependencies", {}) + + # Add conda dependencies + for dep in resolved_dependencies["conda"]: + pixi_data["dependencies"][dep.name] = dep.pin or "*" + + # Add pip dependencies + for dep in resolved_dependencies["pip"]: + pixi_data["pypi-dependencies"][dep.name] = dep.pin or "*" + + # Write pixi.toml file + with open(output_file, "w") as f: # noqa: PTH123 + tomllib.dump(pixi_data, f) + if verbose: + print(f"โœ… Generated pixi.toml at {output_file}") + + +def _parse_pixi_sections_from_pyproject() -> dict[str, Any]: + pyproject_path = Path("pyproject.toml") + if not pyproject_path.exists(): + return {} + with pyproject_path.open("rb") as f: + pyproject_data = tomllib.load(f) + return pyproject_data.get("tool", {}).get("unidep", {}).get("pixi", {}) From 63890d1aae8ee208756903e8d6d236b95a5c2195 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" Date: Fri, 4 Oct 2024 20:43:28 +0000 Subject: [PATCH 02/39] Update files from markdown-code-runner --- example/README.md | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/example/README.md b/example/README.md index 69c50255..6d2547c5 100644 --- a/example/README.md +++ b/example/README.md @@ -110,11 +110,11 @@ Using `unidep` for installation offers a more comprehensive approach. 
It handles $ unidep install --dry-run -e ./setup_py_project ๐Ÿ“ฆ Installing conda dependencies with `conda install --yes --override-channels --channel conda-forge pandas">=1,<3" adaptive">=0.15.0, <2.0.0" pfapack packaging adaptive-scheduler numpy">=1.21" hpc05 pexpect pytest pytest-cov` -๐Ÿ“ฆ Installing pip dependencies with `/opt/hostedtoolcache/Python/3.12.5/x64/bin/python -m pip install yaml2bib rsync-time-machine slurm-usage pyyaml aiokef markdown-code-runner numthreads unidep` +๐Ÿ“ฆ Installing pip dependencies with `/opt/hostedtoolcache/Python/3.12.6/x64/bin/python -m pip install yaml2bib rsync-time-machine slurm-usage pyyaml aiokef markdown-code-runner numthreads unidep` ๐Ÿ“ Found local dependencies: {'setup_py_project': ['hatch_project', 'setuptools_project']} -๐Ÿ“ฆ Installing project with `/opt/hostedtoolcache/Python/3.12.5/x64/bin/python -m pip install --no-dependencies -e /home/runner/work/unidep/unidep/example/hatch_project -e /home/runner/work/unidep/unidep/example/setuptools_project -e ./setup_py_project` +๐Ÿ“ฆ Installing project with `/opt/hostedtoolcache/Python/3.12.6/x64/bin/python -m pip install --no-dependencies -e /home/runner/work/unidep/unidep/example/hatch_project -e /home/runner/work/unidep/unidep/example/setuptools_project -e ./setup_py_project` ``` @@ -157,11 +157,11 @@ unidep install-all -e $ unidep install-all -e --dry-run ๐Ÿ“ฆ Installing conda dependencies with `conda install --yes --override-channels --channel conda-forge adaptive-scheduler numpy">=1.21" hpc05 pandas">=1,<3" pexpect adaptive">=0.15.0, <2.0.0" pfapack packaging pytest pytest-cov` -๐Ÿ“ฆ Installing pip dependencies with `/opt/hostedtoolcache/Python/3.12.5/x64/bin/python -m pip install unidep markdown-code-runner numthreads yaml2bib rsync-time-machine slurm-usage pyyaml aiokef` +๐Ÿ“ฆ Installing pip dependencies with `/opt/hostedtoolcache/Python/3.12.6/x64/bin/python -m pip install unidep markdown-code-runner numthreads yaml2bib rsync-time-machine slurm-usage pyyaml aiokef` ๐Ÿ“ Found local dependencies: {'pyproject_toml_project': ['hatch_project'], 'setup_py_project': ['hatch_project', 'setuptools_project'], 'setuptools_project': ['hatch_project']} -๐Ÿ“ฆ Installing project with `/opt/hostedtoolcache/Python/3.12.5/x64/bin/python -m pip install --no-dependencies -e ./hatch2_project -e ./hatch_project -e ./pyproject_toml_project -e ./setup_py_project -e ./setuptools_project` +๐Ÿ“ฆ Installing project with `/opt/hostedtoolcache/Python/3.12.6/x64/bin/python -m pip install --no-dependencies -e ./hatch2_project -e ./hatch_project -e ./pyproject_toml_project -e ./setup_py_project -e ./setuptools_project` ``` From 75fc81a010ff2e2e00d66e7ea5c9546eedffc8d9 Mon Sep 17 00:00:00 2001 From: Bas Nijholt Date: Fri, 4 Oct 2024 14:40:29 -0700 Subject: [PATCH 03/39] wip --- tests/test_pixi.py | 18 +++++-- unidep/_cli.py | 28 ++++++++--- unidep/_conda_lock.py | 3 +- unidep/_pixi.py | 109 +++++++++++++++++++++++++++++++++++------- 4 files changed, 128 insertions(+), 30 deletions(-) diff --git a/tests/test_pixi.py b/tests/test_pixi.py index 4b205f6d..4002f0e9 100644 --- a/tests/test_pixi.py +++ b/tests/test_pixi.py @@ -3,7 +3,6 @@ from __future__ import annotations import textwrap -from pathlib import Path from typing import TYPE_CHECKING import pytest @@ -17,6 +16,7 @@ if TYPE_CHECKING: import sys + from pathlib import Path if sys.version_info >= (3, 8): from typing import Literal @@ -46,6 +46,8 @@ def test_filter_python_dependencies_with_platforms( - conda-forge dependencies: - foo # [unix] + platforms: + - linux-64 
""", ), ) @@ -55,17 +57,23 @@ def test_filter_python_dependencies_with_platforms( output_file = tmp_path / "pixi.toml" generate_pixi_toml( resolved, - requirements, + channels=requirements.channels, + platforms=requirements.platforms, output_file=output_file, verbose=False, ) assert output_file.read_text() == textwrap.dedent( """\ [project] - platforms = ["linux-64"] - channels = ["conda-forge"] + name = "unidep" + platforms = [ + "linux-64", + ] + channels = [ + "conda-forge", + ] - [dependencies] + [target.linux-64.dependencies] foo = "*" """, ) diff --git a/unidep/_cli.py b/unidep/_cli.py index 3ac8ab63..6a6e1787 100755 --- a/unidep/_cli.py +++ b/unidep/_cli.py @@ -319,6 +319,11 @@ def _parse_args() -> argparse.Namespace: " `- numpy # [linux]` becomes `sel(linux): numpy`, if `comment` then" " it remains `- numpy # [linux]`, by default `sel`", ) + parser_merge.add_argument( + "--pixi", + action="store_true", + help="Generate a `pixi.toml` file instead of `environment.yaml`", + ) _add_common_args( parser_merge, { @@ -861,7 +866,7 @@ def _pip_install_local( subprocess.run(pip_command, check=True) -def _install_command( # noqa: PLR0912, PLR0915 +def _install_command( # noqa: C901, PLR0912, PLR0915 *files: Path, conda_executable: CondaExecutable | None, conda_env_name: str | None, @@ -923,17 +928,17 @@ def _install_command( # noqa: PLR0912, PLR0915 print("๐Ÿ”ฎ Installing conda dependencies with `pixi`") generate_pixi_toml( resolved, - platforms, channels=requirements.channels, + platforms=platforms, output_file="pixi.toml", verbose=verbose, ) # Install dependencies using pixi if not dry_run: - subprocess.run(["pixi", "install"], check=True) + subprocess.run(["pixi", "install"], check=True) # noqa: S607 # Optionally, handle local packages - if not skip_local: - _install_local_packages_with_pixi(...) + # if not skip_local: + # _install_local_packages_with_pixi(...) 
return # Exit after handling pixi elif env_spec.conda: assert conda_executable is not None @@ -1205,6 +1210,7 @@ def _merge_command( ignore_pins: list[str], skip_dependencies: list[str], overwrite_pins: list[str], + pixi: bool, verbose: bool, ) -> None: # pragma: no cover # When using stdout, suppress verbose output @@ -1236,13 +1242,22 @@ def _merge_command( platforms, optional_dependencies=requirements.optional_dependencies, ) + output_file = None if stdout else output + if pixi: + generate_pixi_toml( + resolved, + channels=requirements.channels, + platforms=requirements.platforms, + output_file=output_file, + verbose=verbose, + ) + return env_spec = create_conda_env_specification( resolved, requirements.channels, platforms, selector=selector, ) - output_file = None if stdout else output write_conda_environment_file(env_spec, output_file, name, verbose=verbose) if output_file: found_files_str = ", ".join(f"`{f}`" for f in found_files) @@ -1427,6 +1442,7 @@ def main() -> None: ignore_pins=args.ignore_pin, skip_dependencies=args.skip_dependency, overwrite_pins=args.overwrite_pin, + pixi=args.pixi, verbose=args.verbose, ) elif args.command == "pip": # pragma: no cover diff --git a/unidep/_conda_lock.py b/unidep/_conda_lock.py index b00af6ad..94b9cf4e 100644 --- a/unidep/_conda_lock.py +++ b/unidep/_conda_lock.py @@ -116,8 +116,9 @@ def _conda_lock_global( selector="comment", platforms=platforms, ignore_pins=ignore_pins, - overwrite_pins=overwrite_pins, skip_dependencies=skip_dependencies, + overwrite_pins=overwrite_pins, + pixi=False, verbose=verbose, ) _run_conda_lock( diff --git a/unidep/_pixi.py b/unidep/_pixi.py index ad3e3202..92d179de 100644 --- a/unidep/_pixi.py +++ b/unidep/_pixi.py @@ -4,8 +4,9 @@ from pathlib import Path from typing import TYPE_CHECKING, Any +from unidep._conda_env import _extract_conda_pip_dependencies + if TYPE_CHECKING: - from unidep._dependencies_parsing import ParsedRequirements from unidep.platform_definitions import CondaPip, Platform, Spec try: # pragma: no cover @@ -20,36 +21,108 @@ def generate_pixi_toml( resolved_dependencies: dict[str, dict[Platform | None, dict[CondaPip, Spec]]], - requirements: ParsedRequirements, - output_file: str = "pixi.toml", + channels: list[str], + platforms: list[Platform], + output_file: str | Path | None = "pixi.toml", *, verbose: bool = False, ) -> None: - pixi_data = {} + pixi_data = _initialize_pixi_data(channels, platforms) + _process_dependencies(pixi_data, resolved_dependencies) + _write_pixi_toml(pixi_data, output_file, verbose=verbose) + - pixi_data["project"] = { - "platforms": requirements.platforms, - "channels": requirements.channels, - } +def _initialize_pixi_data( + channels: list[str], + platforms: list[Platform], +) -> dict[str, dict[str, Any]]: + pixi_data: dict[str, dict[str, Any]] = {} # Include extra configurations from pyproject.toml - pixi_data.update(_parse_pixi_sections_from_pyproject()) + sections = _parse_pixi_sections_from_pyproject() + pixi_data.update(sections) - # Map unidep dependencies to pixi.toml sections + # Set 'project' section + pixi_data.setdefault("project", {}) + project_name = Path.cwd().name + pixi_data["project"].setdefault("name", project_name) + pixi_data["project"].setdefault("platforms", platforms) + pixi_data["project"].setdefault("channels", channels) + + # Initialize dependencies sections pixi_data.setdefault("dependencies", {}) pixi_data.setdefault("pypi-dependencies", {}) + pixi_data.setdefault("target", {}) # For platform-specific dependencies + + return pixi_data + + 
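# A sketch of the table layout built by the helpers below, with
# illustrative package names: platform-independent conda packages land in
# [dependencies], platform-specific ones under
# [target.<platform>.dependencies], and pip packages in the
# pypi-dependencies variants:
#
#   pixi_data["dependencies"]["packaging"] = "*"        # all platforms
#   target = pixi_data["target"].setdefault("linux-64", {})
#   target.setdefault("dependencies", {})["foo"] = "*"  # linux-64 only
#
# which tomli_w serializes as:
#
#   [dependencies]
#   packaging = "*"
#
#   [target.linux-64.dependencies]
#   foo = "*"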
+def _process_dependencies( + pixi_data: dict[str, dict[str, Any]], + resolved_dependencies: dict[str, dict[Platform | None, dict[CondaPip, Spec]]], +) -> None: + # Extract conda and pip dependencies + conda_deps, pip_deps = _extract_conda_pip_dependencies(resolved_dependencies) + + # Process conda dependencies + for pkg_name, platform_to_spec in conda_deps.items(): + for _platform, spec in platform_to_spec.items(): + pin = spec.pin or "*" + if _platform is None: + # Applies to all platforms + pixi_data["dependencies"][pkg_name] = pin + else: + # Platform-specific dependency + # Ensure target section exists + target = pixi_data["target"].setdefault(_platform, {}) + deps = target.setdefault("dependencies", {}) + deps[pkg_name] = pin - # Add conda dependencies - for dep in resolved_dependencies["conda"]: - pixi_data["dependencies"][dep.name] = dep.pin or "*" + # Process pip dependencies + for pkg_name, platform_to_spec in pip_deps.items(): + for _platform, spec in platform_to_spec.items(): + pin = spec.pin or "*" + if _platform is None: + # Applies to all platforms + pixi_data["pypi-dependencies"][pkg_name] = pin + else: + # Platform-specific dependency + # Ensure target section exists + target = pixi_data["target"].setdefault(_platform, {}) + deps = target.setdefault("pypi-dependencies", {}) + deps[pkg_name] = pin - # Add pip dependencies - for dep in resolved_dependencies["pip"]: - pixi_data["pypi-dependencies"][dep.name] = dep.pin or "*" + # Remove empty sections if necessary + if not pixi_data["dependencies"]: + del pixi_data["dependencies"] + if not pixi_data["pypi-dependencies"]: + del pixi_data["pypi-dependencies"] + if not pixi_data["target"]: + del pixi_data["target"] + + +def _write_pixi_toml( + pixi_data: dict[str, dict[str, Any]], + output_file: str | Path | None, + *, + verbose: bool, +) -> None: + try: + import tomli_w + except ImportError: # pragma: no cover + msg = ( + "โŒ `tomli_w` is required to write TOML files." + " Install it with `pip install tomli_w`." 
+ ) + raise ImportError(msg) from None # Write pixi.toml file - with open(output_file, "w") as f: # noqa: PTH123 - tomllib.dump(pixi_data, f) + if output_file is not None: + with open(output_file, "wb") as f: # noqa: PTH123 + tomli_w.dump(pixi_data, f) + else: + # to stdout + tomli_w.dump(pixi_data, sys.stdout.buffer) if verbose: print(f"โœ… Generated pixi.toml at {output_file}") From 44d677b8fcafc07e5dd6709f21acf19de3b91013 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" Date: Fri, 4 Oct 2024 21:44:01 +0000 Subject: [PATCH 04/39] Update files from markdown-code-runner --- README.md | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index 86160eee..65c34754 100644 --- a/README.md +++ b/README.md @@ -443,8 +443,8 @@ See `unidep merge -h` for more information: ```bash usage: unidep merge [-h] [-o OUTPUT] [-n NAME] [--stdout] - [--selector {sel,comment}] [-d DIRECTORY] [--depth DEPTH] - [-v] + [--selector {sel,comment}] [--pixi] [-d DIRECTORY] + [--depth DEPTH] [-v] [--platform {linux-64,linux-aarch64,linux-ppc64le,osx-64,osx-arm64,win-64}] [--skip-dependency SKIP_DEPENDENCY] [--ignore-pin IGNORE_PIN] [--overwrite-pin OVERWRITE_PIN] @@ -468,6 +468,8 @@ options: `sel` then `- numpy # [linux]` becomes `sel(linux): numpy`, if `comment` then it remains `- numpy # [linux]`, by default `sel` + --pixi Generate a `pixi.toml` file instead of + `environment.yaml` -d DIRECTORY, --directory DIRECTORY Base directory to scan for `requirements.yaml` or `pyproject.toml` file(s), by default `.` From fb07b2379cd59cbdda3758093fa19e1526ea7646 Mon Sep 17 00:00:00 2001 From: Bas Nijholt Date: Fri, 4 Oct 2024 14:46:54 -0700 Subject: [PATCH 05/39] wip --- unidep/_cli.py | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/unidep/_cli.py b/unidep/_cli.py index 6a6e1787..7a67ccd6 100755 --- a/unidep/_cli.py +++ b/unidep/_cli.py @@ -294,9 +294,9 @@ def _parse_args() -> argparse.Namespace: parser_merge.add_argument( "-o", "--output", - type=Path, - default="environment.yaml", - help="Output file for the conda environment, by default `environment.yaml`", + default=None, + help="Output file for the conda environment, by default `environment.yaml`" + ", or `pixi.toml` if `--pixi` is used", ) parser_merge.add_argument( "-n", @@ -1203,7 +1203,7 @@ def _merge_command( directory: Path, files: list[Path] | None, name: str, - output: Path, + output: str | Path | None, stdout: bool, selector: Literal["sel", "comment"], platforms: list[Platform], @@ -1216,6 +1216,10 @@ def _merge_command( # When using stdout, suppress verbose output verbose = verbose and not stdout + if output is None: + output = "environment.yaml" if not pixi else "pixi.toml" + output = Path(output) + if files: # ignores depth and directory! found_files = files else: From 737ff615b4bf484b1ca358adcef94060357f6bed Mon Sep 17 00:00:00 2001 From: Bas Nijholt Date: Fri, 4 Oct 2024 14:49:06 -0700 Subject: [PATCH 06/39] args --- unidep/_cli.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/unidep/_cli.py b/unidep/_cli.py index 7a67ccd6..5b918d0a 100755 --- a/unidep/_cli.py +++ b/unidep/_cli.py @@ -276,7 +276,8 @@ def _parse_args() -> argparse.Namespace: merge_help = ( f"Combine multiple (or a single) {_DEP_FILES}" " files into a" - " single Conda installable `environment.yaml` file." + " single Conda installable `environment.yaml` file" + " or Pixi installable `pixi.toml` file." ) merge_example = ( " Example usage: `unidep merge --directory . 
--depth 1 --output environment.yaml`" # noqa: E501 @@ -296,7 +297,7 @@ def _parse_args() -> argparse.Namespace: "--output", default=None, help="Output file for the conda environment, by default `environment.yaml`" - ", or `pixi.toml` if `--pixi` is used", + " or `pixi.toml` if `--pixi` is used", ) parser_merge.add_argument( "-n", From 0ff0e853a220464a5e9df8a79a59a9b11537d14b Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" Date: Fri, 4 Oct 2024 21:57:40 +0000 Subject: [PATCH 07/39] Update files from markdown-code-runner --- README.md | 16 +++++++++------- 1 file changed, 9 insertions(+), 7 deletions(-) diff --git a/README.md b/README.md index 65c34754..78365cb7 100644 --- a/README.md +++ b/README.md @@ -390,7 +390,8 @@ positional arguments: Subcommands merge Combine multiple (or a single) `requirements.yaml` or `pyproject.toml` files into a single Conda installable - `environment.yaml` file. + `environment.yaml` file or Pixi installable + `pixi.toml` file. install Automatically install all dependencies from one or more `requirements.yaml` or `pyproject.toml` files. This command first installs dependencies with Conda, @@ -450,17 +451,18 @@ usage: unidep merge [-h] [-o OUTPUT] [-n NAME] [--stdout] [--ignore-pin IGNORE_PIN] [--overwrite-pin OVERWRITE_PIN] Combine multiple (or a single) `requirements.yaml` or `pyproject.toml` files -into a single Conda installable `environment.yaml` file. Example usage: -`unidep merge --directory . --depth 1 --output environment.yaml` to search for -`requirements.yaml` or `pyproject.toml` files in the current directory and its -subdirectories and create `environment.yaml`. These are the defaults, so you -can also just run `unidep merge`. +into a single Conda installable `environment.yaml` file or Pixi installable +`pixi.toml` file. Example usage: `unidep merge --directory . --depth 1 +--output environment.yaml` to search for `requirements.yaml` or +`pyproject.toml` files in the current directory and its subdirectories and +create `environment.yaml`. These are the defaults, so you can also just run +`unidep merge`. 
options: -h, --help show this help message and exit -o OUTPUT, --output OUTPUT Output file for the conda environment, by default - `environment.yaml` + `environment.yaml` or `pixi.toml` if `--pixi` is used -n NAME, --name NAME Name of the conda environment, by default `myenv` --stdout Output to stdout instead of a file --selector {sel,comment} From 13d149498c60ba8c8f4925bc32e8cb56efea04c5 Mon Sep 17 00:00:00 2001 From: Bas Nijholt Date: Fri, 4 Oct 2024 15:40:00 -0700 Subject: [PATCH 08/39] wip --- .gitignore | 4 + example/pyproject_toml_project/pyproject.toml | 18 +- tests/test_pixi.py | 1 + unidep/_cli.py | 76 +++- unidep/_pixi.py | 11 +- unidep/_pixi_lock.py | 328 ++++++++++++++++++ unidep/utils.py | 15 +- 7 files changed, 435 insertions(+), 18 deletions(-) create mode 100644 unidep/_pixi_lock.py diff --git a/.gitignore b/.gitignore index 72364f99..c25d86fe 100644 --- a/.gitignore +++ b/.gitignore @@ -87,3 +87,7 @@ ENV/ # Rope project settings .ropeproject + +# pixi environments +.pixi +*.egg-info diff --git a/example/pyproject_toml_project/pyproject.toml b/example/pyproject_toml_project/pyproject.toml index cbb3f17a..9d4a6349 100644 --- a/example/pyproject_toml_project/pyproject.toml +++ b/example/pyproject_toml_project/pyproject.toml @@ -18,15 +18,11 @@ py-modules = ["pyproject_toml_project"] [tool.unidep] channels = ["conda-forge"] dependencies = [ - "adaptive:linux64", - "pfapack:linux64", - "packaging", - { pip = "markdown-code-runner" }, - { pip = "numthreads" }, + "libzlib" ] -local_dependencies = [ - "../hatch_project[test]", # Local dependency with optional dependencies -] -[tool.unidep.optional_dependencies] -dev = ["mypy", "ruff"] -test = ["pytest"] +# local_dependencies = [ +# "../hatch_project[test]", # Local dependency with optional dependencies +# ] +# [tool.unidep.optional_dependencies] +# dev = ["mypy", "ruff"] +# test = ["pytest"] diff --git a/tests/test_pixi.py b/tests/test_pixi.py index 4002f0e9..acf12ff6 100644 --- a/tests/test_pixi.py +++ b/tests/test_pixi.py @@ -57,6 +57,7 @@ def test_filter_python_dependencies_with_platforms( output_file = tmp_path / "pixi.toml" generate_pixi_toml( resolved, + project_name=None, channels=requirements.channels, platforms=requirements.platforms, output_file=output_file, diff --git a/unidep/_cli.py b/unidep/_cli.py index 5b918d0a..0f3bcdc1 100755 --- a/unidep/_cli.py +++ b/unidep/_cli.py @@ -30,6 +30,7 @@ parse_requirements, ) from unidep._pixi import generate_pixi_toml +from unidep._pixi_lock import pixi_lock_command from unidep._setuptools_integration import ( filter_python_dependencies, get_python_dependencies, @@ -265,7 +266,7 @@ def _add_extra_flags( ) -def _parse_args() -> argparse.Namespace: +def _parse_args() -> argparse.Namespace: # noqa: PLR0915 parser = argparse.ArgumentParser( description="Unified Conda and Pip requirements management.", formatter_class=_HelpFormatter, @@ -486,6 +487,61 @@ def _parse_args() -> argparse.Namespace: ) _add_extra_flags(parser_lock, "conda-lock lock", "conda-lock", "--micromamba") + # Subparser for the 'pixi-lock' command + pixi_lock_help = ( + "Generate a global `pixi.lock` file for a collection of" + f" {_DEP_FILES}" + " files. Additionally, create individual" + f" `pixi.lock` files for each {_DEP_FILES} file" + " consistent with the global lock file." + ) + pixi_lock_example = ( + " Example usage: `unidep pixi-lock --directory ./projects` to generate" + f" pixi lock files for all {_DEP_FILES}" + " files in the `./projects`" + " directory. 
Use `--only-global` to generate only the global lock file." + ) + + parser_pixi_lock = subparsers.add_parser( + "pixi-lock", + help=pixi_lock_help, + description=pixi_lock_help + pixi_lock_example, + formatter_class=_HelpFormatter, + ) + + parser_pixi_lock.add_argument( + "--only-global", + action="store_true", + help="Only generate the global lock file", + ) + parser_pixi_lock.add_argument( + "--lockfile", + type=Path, + default="pixi.lock", + help="Specify a path for the global lockfile (default: `pixi.lock`" + " in current directory). Path should be relative, e.g.," + " `--lockfile ./locks/pixi.lock`.", + ) + parser_pixi_lock.add_argument( + "--check-input-hash", + action="store_true", + help="Check existing input hashes in lockfiles before regenerating lock files.", + ) + _add_common_args( + parser_pixi_lock, + { + "directory", + "file-alt", + "verbose", + "platform", + "depth", + "ignore-pin", + "skip-dependency", + "overwrite-pin", + }, + ) + _add_extra_flags(parser_pixi_lock, "pixi lock", "pixi-lock", "--platform") + # Subparser for the 'pip-compile' command pip_compile_help = ( "Generate a fully pinned `requirements.txt` file from one or more" @@ -929,6 +985,7 @@ def _install_command( # noqa: C901, PLR0912, PLR0915 print("๐Ÿ”ฎ Installing conda dependencies with `pixi`") generate_pixi_toml( resolved, + name=None, channels=requirements.channels, platforms=platforms, output_file="pixi.toml", @@ -1251,6 +1308,7 @@ def _merge_command( if pixi: generate_pixi_toml( resolved, + project_name=name, channels=requirements.channels, platforms=requirements.platforms, output_file=output_file, @@ -1430,7 +1488,7 @@ def _pip_subcommand( return escape_unicode(separator).join(pip_dependencies) -def main() -> None: +def main() -> None: # noqa: PLR0912 """Main entry point for the command-line tool.""" args = _parse_args() @@ -1545,6 +1603,20 @@ def main() -> None: extra_flags=args.extra_flags, lockfile=args.lockfile, ) + elif args.command == "pixi-lock": + pixi_lock_command( + depth=args.depth, + directory=args.directory, + files=args.file or None, + platforms=args.platform, + verbose=args.verbose, + only_global=args.only_global, + ignore_pins=args.ignore_pin, + skip_dependencies=args.skip_dependency, + overwrite_pins=args.overwrite_pin, + check_input_hash=args.check_input_hash, + extra_flags=args.extra_flags, + ) elif args.command == "pip-compile": # pragma: no cover if args.platform and len(args.platform) > 1: print( diff --git a/unidep/_pixi.py b/unidep/_pixi.py index 92d179de..4241a0c7 100644 --- a/unidep/_pixi.py +++ b/unidep/_pixi.py @@ -5,6 +5,7 @@ from typing import TYPE_CHECKING, Any from unidep._conda_env import _extract_conda_pip_dependencies +from unidep.utils import identify_current_platform if TYPE_CHECKING: from unidep.platform_definitions import CondaPip, Platform, Spec @@ -21,13 +22,14 @@ def generate_pixi_toml( resolved_dependencies: dict[str, dict[Platform | None, dict[CondaPip, Spec]]], + project_name: str, channels: list[str], platforms: list[Platform], output_file: str | Path | None = "pixi.toml", *, verbose: bool = False, ) -> None: - pixi_data = _initialize_pixi_data(channels, platforms) + pixi_data = _initialize_pixi_data(channels, platforms, project_name) _process_dependencies(pixi_data, resolved_dependencies) _write_pixi_toml(pixi_data, output_file, verbose=verbose) @@ -35,17 +37,18 @@ def generate_pixi_toml( def _initialize_pixi_data( channels: list[str], platforms: list[Platform], + project_name: str, ) -> dict[str, dict[str, Any]]: pixi_data: dict[str, dict[str, Any]] = {} 
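    # Note on the lines below: with an empty `platforms`, pixi falls back
    # to the current machine only; identify_current_platform() returns a
    # Platform string such as "linux-64" or "osx-arm64".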
- + if not platforms: + platforms = [identify_current_platform()] # Include extra configurations from pyproject.toml sections = _parse_pixi_sections_from_pyproject() pixi_data.update(sections) # Set 'project' section pixi_data.setdefault("project", {}) - project_name = Path.cwd().name - pixi_data["project"].setdefault("name", project_name) + pixi_data["project"].setdefault("name", project_name or Path.cwd().name) pixi_data["project"].setdefault("platforms", platforms) pixi_data["project"].setdefault("channels", channels) diff --git a/unidep/_pixi_lock.py b/unidep/_pixi_lock.py new file mode 100644 index 00000000..737514e5 --- /dev/null +++ b/unidep/_pixi_lock.py @@ -0,0 +1,328 @@ +from __future__ import annotations + +import shutil +import subprocess +import sys +from collections import defaultdict +from typing import TYPE_CHECKING, Any, NamedTuple + +from ruamel.yaml import YAML + +from unidep._dependencies_parsing import find_requirements_files, parse_requirements +from unidep.utils import add_comment_to_file, change_directory + +if TYPE_CHECKING: + from pathlib import Path + + from unidep.platform_definitions import Platform + + if sys.version_info >= (3, 8): + pass + else: + pass + + +def _run_pixi_lock( + pixi_toml: Path, + pixi_lock_output: Path, + *, + check_input_hash: bool = False, + extra_flags: list[str], +) -> None: + if shutil.which("pixi") is None: + msg = ( + "Cannot find `pixi`." + " Please install it with `mamba install -c conda-forge pixi`." + ) + raise RuntimeError(msg) + if not check_input_hash and pixi_lock_output.exists(): + print(f"๐Ÿ—‘๏ธ Removing existing `{pixi_lock_output}`") + pixi_lock_output.unlink() + + cmd = [ + "pixi", + "list", + *extra_flags, + ] + if check_input_hash: + cmd.append("--check-input-hash") + print(f"๐Ÿ”’ Locking dependencies with `{' '.join(cmd)}`\n") + try: + with change_directory(pixi_toml.parent): + subprocess.run(cmd, check=True, text=True, capture_output=True) + # Optionally process the lock file if needed + add_comment_to_file( + pixi_lock_output, + extra_lines=[ + "#", + "# This environment can be installed with", + "# `pixi install`", + "# This file is a `pixi.lock` file generated via `unidep`.", + "# For details see https://pixi.sh/", + ], + ) + except subprocess.CalledProcessError as e: + print("โŒ Error occurred:\n", e) + print("Return code:", e.returncode) + print("Output:", e.output) + print("Error Output:", e.stderr) + sys.exit(1) + + +def _pixi_lock_global( + *, + depth: int, + directory: Path, + files: list[Path] | None, + platforms: list[Platform], + verbose: bool, + check_input_hash: bool, + ignore_pins: list[str], + skip_dependencies: list[str], + overwrite_pins: list[str], + extra_flags: list[str], +) -> Path: + """Generate a pixi.lock file for the global dependencies.""" + from unidep._cli import _merge_command + + if files: + directory = files[0].parent + + pixi_toml = directory / "pixi.toml" + pixi_lock_output = directory / "pixi.lock" + _merge_command( + depth=depth, + directory=directory, + files=files, + name="myenv", + output=pixi_toml, + stdout=False, + selector="comment", + platforms=platforms, + ignore_pins=ignore_pins, + skip_dependencies=skip_dependencies, + overwrite_pins=overwrite_pins, + pixi=True, + verbose=verbose, + ) + _run_pixi_lock( + pixi_toml, + pixi_lock_output, + check_input_hash=check_input_hash, + extra_flags=extra_flags, + ) + print("โœ… Global dependencies locked successfully in `pixi.lock`.") + return pixi_toml.with_name("pixi.lock") + + +class PixiLockSpec(NamedTuple): + """A specification 
of the pixi lock file.""" + + packages: dict[tuple[Platform, str], dict[str, Any]] + + +def _parse_pixi_lock_packages( + pixi_lock_data: dict[str, Any], +) -> PixiLockSpec: + packages = {} + environments = pixi_lock_data.get("environments", {}) + for env_name, env_data in environments.items(): + channels = env_data.get("channels", []) + for platform, packages_list in env_data.get("packages", {}).items(): + for pkg_entry in packages_list: + # pkg_entry is a dict like {'conda': 'url'} + for manager, url in pkg_entry.items(): + # Extract the package filename from the URL + package_filename = url.split("/")[-1] + # Remove the file extension to get package name and version + package_name_version = package_filename.split(".")[0] + # Split the name and version + # For conda packages, the format is name-version-build + parts = package_name_version.split("-") + if len(parts) >= 3: + package_name = "-".join( + parts[:-2], + ) # Join parts for names with hyphens + package_version = parts[-2] + else: + package_name = parts[0] + package_version = parts[1] if len(parts) > 1 else "" + key = (platform, package_name) + packages[key] = { + "environment": env_name, + "channels": channels, + "package": pkg_entry, + "manager": manager, + "url": url, + "version": package_version, + } + return PixiLockSpec(packages=packages) + + +def _pixi_lock_subpackage( + *, + file: Path, + lock_spec: PixiLockSpec, + platforms: list[Platform], + yaml: YAML | None, # Passing this to preserve order! +) -> Path: + requirements = parse_requirements(file) + locked_entries: dict[Platform, list[dict]] = defaultdict(list) + + for name, specs in requirements.requirements.items(): + if name.startswith("__"): + continue + for spec in specs: + _platforms = spec.platforms() + if _platforms is None: + _platforms = platforms + else: + _platforms = [p for p in _platforms if p in platforms] + + for _platform in _platforms: + key = (_platform, name) + if key in lock_spec.packages: + pkg_entry = lock_spec.packages[key]["package"] + locked_entries[_platform].append(pkg_entry) + else: + print( + f"โš ๏ธ Package {name} for platform {_platform} not found" + " in global lock file.", + ) + + # Generate subproject pixi.lock + pixi_lock_output = file.parent / "pixi.lock" + sub_lock_data = { + "version": 5, + "environments": { + "default": { + "channels": lock_spec.packages[next(iter(lock_spec.packages))][ + "channels" + ], + "packages": dict(locked_entries), + }, + }, + } + + if yaml is None: + yaml = YAML(typ="rt") + yaml.default_flow_style = False + yaml.width = 4096 + yaml.representer.ignore_aliases = lambda *_: True # Disable anchors + + with pixi_lock_output.open("w") as fp: + yaml.dump(sub_lock_data, fp) + + add_comment_to_file( + pixi_lock_output, + extra_lines=[ + "#", + "# This environment can be installed with", + "# `pixi install`", + "# This file is a `pixi.lock` file generated via `unidep`.", + "# For details see https://github.com/pyx/conda-pix", + ], + ) + return pixi_lock_output + + +def _check_consistent_lock_files( + global_lock_file: Path, + sub_lock_files: list[Path], +) -> list[str]: + yaml = YAML(typ="safe") + with global_lock_file.open() as fp: + global_data = yaml.load(fp) + + global_packages = set() + environments = global_data.get("environments", {}) + for env_data in environments.values(): + for packages_list in env_data.get("packages", {}).values(): + print(f"{packages_list=}") + global_packages.update(packages_list) + + mismatches = [] + for lock_file in sub_lock_files: + with lock_file.open() as fp: + data = yaml.load(fp) 
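        # Each entry in a `packages` list is a {manager: url} mapping, e.g.
        #   {"conda": "https://conda.anaconda.org/conda-forge/osx-64/bzip2-1.0.8-hfdf4475_7.conda"}
        # The code below gathers the sub-lock entries and flags any that
        # do not also appear in the global lock.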
+ + sub_packages = set() + environments = data.get("environments", {}) + for env_data in environments.values(): + for packages_list in env_data.get("packages", {}).values(): + sub_packages.update(packages_list) + + if not sub_packages.issubset(global_packages): + missing = sub_packages - global_packages + mismatches.append( + f"Packages {missing} in {lock_file} not found in global lock file.", + ) + + return mismatches + + +def pixi_lock_command( + *, + depth: int, + directory: Path, + files: list[Path] | None, + platforms: list[Platform], + verbose: bool, + only_global: bool, + check_input_hash: bool, + ignore_pins: list[str], + skip_dependencies: list[str], + overwrite_pins: list[str], + extra_flags: list[str], +) -> None: + """Generate a pixi.lock file for a collection of dependencies.""" + if extra_flags: + assert extra_flags[0] == "--" + extra_flags = extra_flags[1:] + if verbose: + print(f"๐Ÿ“ Extra flags for `pixi lock`: {extra_flags}") + + pixi_lock_output = _pixi_lock_global( + depth=depth, + directory=directory, + files=files, + platforms=platforms, + verbose=verbose, + check_input_hash=check_input_hash, + ignore_pins=ignore_pins, + overwrite_pins=overwrite_pins, + skip_dependencies=skip_dependencies, + extra_flags=extra_flags, + ) + if only_global or files: + return + + with YAML(typ="safe") as yaml, pixi_lock_output.open() as fp: + global_lock_data = yaml.load(fp) + + lock_spec = _parse_pixi_lock_packages(global_lock_data) + + sub_lock_files = [] + found_files = find_requirements_files(directory, depth) + for file in found_files: + if file.parent == directory: + continue + sublock_file = _pixi_lock_subpackage( + file=file, + lock_spec=lock_spec, + platforms=platforms, + yaml=yaml, + ) + print(f"๐Ÿ“ Generated lock file for `{file}`: `{sublock_file}`") + sub_lock_files.append(sublock_file) + + mismatches = _check_consistent_lock_files( + global_lock_file=pixi_lock_output, + sub_lock_files=sub_lock_files, + ) + if not mismatches: + print("โœ… Analyzed all lock files and found no inconsistencies.") + else: + print("โŒ Mismatches found:") + for mismatch in mismatches: + print(mismatch) diff --git a/unidep/utils.py b/unidep/utils.py index 07f7f381..4413274b 100644 --- a/unidep/utils.py +++ b/unidep/utils.py @@ -6,13 +6,15 @@ from __future__ import annotations import codecs +import os import platform import re import sys import warnings from collections import defaultdict +from contextlib import contextmanager from pathlib import Path -from typing import Any, NamedTuple, cast +from typing import Any, Generator, NamedTuple, cast from unidep._version import __version__ from unidep.platform_definitions import ( @@ -374,3 +376,14 @@ def get_package_version(package_name: str) -> str | None: return pkg_resources.get_distribution(package_name).version except pkg_resources.DistributionNotFound: return None + + +@contextmanager +def change_directory(new_path: str | Path) -> Generator[None, None, None]: + """A context manager to change the current working directory.""" + original_path = os.getcwd() # noqa: PTH109 + try: + os.chdir(new_path) + yield + finally: + os.chdir(original_path) From f0fdcc1fbe97b371a16d0a514416de8d0d871fe1 Mon Sep 17 00:00:00 2001 From: Bas Nijholt Date: Fri, 4 Oct 2024 15:40:34 -0700 Subject: [PATCH 09/39] wip --- unidep/_pixi_lock.py | 17 +++++++---------- 1 file changed, 7 insertions(+), 10 deletions(-) diff --git a/unidep/_pixi_lock.py b/unidep/_pixi_lock.py index 737514e5..9ac5ea04 100644 --- a/unidep/_pixi_lock.py +++ b/unidep/_pixi_lock.py @@ -130,19 +130,16 @@ 
def _parse_pixi_lock_packages( channels = env_data.get("channels", []) for platform, packages_list in env_data.get("packages", {}).items(): for pkg_entry in packages_list: - # pkg_entry is a dict like {'conda': 'url'} for manager, url in pkg_entry.items(): - # Extract the package filename from the URL + # Extract package name from URL package_filename = url.split("/")[-1] - # Remove the file extension to get package name and version - package_name_version = package_filename.split(".")[0] - # Split the name and version - # For conda packages, the format is name-version-build - parts = package_name_version.split("-") + # Remove the extension + if package_filename.endswith((".conda", ".tar.bz2")): + package_filename = package_filename.rsplit(".", 1)[0] + # For conda packages, format is name-version-build + parts = package_filename.split("-") if len(parts) >= 3: - package_name = "-".join( - parts[:-2], - ) # Join parts for names with hyphens + package_name = "-".join(parts[:-2]) package_version = parts[-2] else: package_name = parts[0] From 0bd689a0a0ba72ebccaa72d776c16198e285cb6b Mon Sep 17 00:00:00 2001 From: Bas Nijholt Date: Fri, 4 Oct 2024 16:06:27 -0700 Subject: [PATCH 10/39] wip --- tests/simple_monorepo/pixi.lock | 95 ++++++++++++++++++++++++ tests/simple_monorepo/pixi.toml | 16 ++++ tests/simple_monorepo/project1/pixi.lock | 15 ++++ tests/simple_monorepo/project2/pixi.lock | 15 ++++ tests/test_pixi_lock.py | 45 +++++++++++ unidep/_cli.py | 2 +- unidep/_pixi.py | 4 +- unidep/_pixi_lock.py | 23 ++++-- 8 files changed, 207 insertions(+), 8 deletions(-) create mode 100644 tests/simple_monorepo/pixi.lock create mode 100644 tests/simple_monorepo/pixi.toml create mode 100644 tests/simple_monorepo/project1/pixi.lock create mode 100644 tests/simple_monorepo/project2/pixi.lock create mode 100644 tests/test_pixi_lock.py diff --git a/tests/simple_monorepo/pixi.lock b/tests/simple_monorepo/pixi.lock new file mode 100644 index 00000000..d2cb5ac8 --- /dev/null +++ b/tests/simple_monorepo/pixi.lock @@ -0,0 +1,95 @@ +# This file is created and managed by `unidep` 0.63.2. +# For details see https://github.com/basnijholt/unidep +# File generated with: `unidep pixi-lock` +# +# This environment can be installed with +# `pixi install` +# This file is a `pixi.lock` file generated via `unidep`. 
+# For details see https://pixi.sh/ + +version: 5 +environments: + default: + channels: + - url: https://conda.anaconda.org/conda-forge/ + packages: + osx-64: + - conda: https://conda.anaconda.org/conda-forge/osx-64/bzip2-1.0.8-hfdf4475_7.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/python_abi-3.13-5_cp313t.conda + osx-arm64: + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/bzip2-1.0.8-h99b78c6_7.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/python_abi-3.13-5_cp313t.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/tzdata-2024b-hc8b5060_0.conda +packages: +- kind: conda + name: bzip2 + version: 1.0.8 + build: h99b78c6_7 + build_number: 7 + subdir: osx-arm64 + url: https://conda.anaconda.org/conda-forge/osx-arm64/bzip2-1.0.8-h99b78c6_7.conda + sha256: adfa71f158cbd872a36394c56c3568e6034aa55c623634b37a4836bd036e6b91 + md5: fc6948412dbbbe9a4c9ddbbcfe0a79ab + depends: + - __osx >=11.0 + license: bzip2-1.0.6 + license_family: BSD + size: 122909 + timestamp: 1720974522888 +- kind: conda + name: bzip2 + version: 1.0.8 + build: hfdf4475_7 + build_number: 7 + subdir: osx-64 + url: https://conda.anaconda.org/conda-forge/osx-64/bzip2-1.0.8-hfdf4475_7.conda + sha256: cad153608b81fb24fc8c509357daa9ae4e49dfc535b2cb49b91e23dbd68fc3c5 + md5: 7ed4301d437b59045be7e051a0308211 + depends: + - __osx >=10.13 + license: bzip2-1.0.6 + license_family: BSD + size: 134188 + timestamp: 1720974491916 +- kind: conda + name: python_abi + version: '3.13' + build: 5_cp313t + build_number: 5 + subdir: osx-64 + url: https://conda.anaconda.org/conda-forge/osx-64/python_abi-3.13-5_cp313t.conda + sha256: a96553de64be6441400e88c2c6ad7123d91cbcea4898b5966a653163f30d9f55 + md5: 32ba8fc57ccb0b48dd6006974f65c525 + constrains: + - python 3.13.* *_cp313t + license: BSD-3-Clause + license_family: BSD + size: 6300 + timestamp: 1723823108577 +- kind: conda + name: python_abi + version: '3.13' + build: 5_cp313t + build_number: 5 + subdir: osx-arm64 + url: https://conda.anaconda.org/conda-forge/osx-arm64/python_abi-3.13-5_cp313t.conda + sha256: 2165466ff175e1890b66d079d64449a1b6dd9873fb0f5e977839ccc4639b813b + md5: 24a9a05eba65586da53ad7b56a06dc02 + constrains: + - python 3.13.* *_cp313t + license: BSD-3-Clause + license_family: BSD + size: 6317 + timestamp: 1723823118660 +- kind: conda + name: tzdata + version: 2024b + build: hc8b5060_0 + subdir: noarch + noarch: generic + url: https://conda.anaconda.org/conda-forge/noarch/tzdata-2024b-hc8b5060_0.conda + sha256: 4fde5c3008bf5d2db82f2b50204464314cc3c91c1d953652f7bd01d9e52aefdf + md5: 8ac3367aafb1cc0a068483c580af8015 + license: LicenseRef-Public-Domain + size: 122354 + timestamp: 1728047496079 diff --git a/tests/simple_monorepo/pixi.toml b/tests/simple_monorepo/pixi.toml new file mode 100644 index 00000000..5cbdb7a5 --- /dev/null +++ b/tests/simple_monorepo/pixi.toml @@ -0,0 +1,16 @@ +[project] +name = "myenv" +platforms = [ + "osx-64", + "osx-arm64", +] +channels = [ + "conda-forge", +] + +[dependencies] +bzip2 = "*" +python_abi = "*" + +[target.osx-arm64.dependencies] +tzdata = "*" diff --git a/tests/simple_monorepo/project1/pixi.lock b/tests/simple_monorepo/project1/pixi.lock new file mode 100644 index 00000000..3bad4815 --- /dev/null +++ b/tests/simple_monorepo/project1/pixi.lock @@ -0,0 +1,15 @@ +# This file is created and managed by `unidep` 0.63.2. 
+# For details see https://github.com/basnijholt/unidep +# File generated with: `unidep pixi-lock` +# +# This environment can be installed with +# `pixi install` +# This file is a `pixi.lock` file generated via `unidep`. +# For details see https://github.com/pyx/conda-pix + +environments: + default: + channels: + - url: https://conda.anaconda.org/conda-forge/ +packages: {} +version: 5 diff --git a/tests/simple_monorepo/project2/pixi.lock b/tests/simple_monorepo/project2/pixi.lock new file mode 100644 index 00000000..3bad4815 --- /dev/null +++ b/tests/simple_monorepo/project2/pixi.lock @@ -0,0 +1,15 @@ +# This file is created and managed by `unidep` 0.63.2. +# For details see https://github.com/basnijholt/unidep +# File generated with: `unidep pixi-lock` +# +# This environment can be installed with +# `pixi install` +# This file is a `pixi.lock` file generated via `unidep`. +# For details see https://github.com/pyx/conda-pix + +environments: + default: + channels: + - url: https://conda.anaconda.org/conda-forge/ +packages: {} +version: 5 diff --git a/tests/test_pixi_lock.py b/tests/test_pixi_lock.py new file mode 100644 index 00000000..9ae4ecd4 --- /dev/null +++ b/tests/test_pixi_lock.py @@ -0,0 +1,45 @@ +"""unidep pixi-lock tests.""" + +from __future__ import annotations + +import shutil +from pathlib import Path +from unittest.mock import patch + +from ruamel.yaml import YAML + +from unidep._pixi_lock import pixi_lock_command + + +def test_conda_lock_command(tmp_path: Path) -> None: + folder = tmp_path / "simple_monorepo" + shutil.copytree(Path(__file__).parent / "simple_monorepo", folder) + with patch("unidep._conda_lock._run_pixi_lock", return_value=None): + pixi_lock_command( + depth=1, + directory=folder, + files=None, + platforms=["linux-64", "osx-arm64"], + verbose=True, + only_global=False, + check_input_hash=True, + ignore_pins=[], + overwrite_pins=[], + skip_dependencies=[], + extra_flags=["--", "--micromamba"], + ) + with YAML(typ="safe") as yaml: + with (folder / "project1" / "conda-lock.yml").open() as f: + lock1 = yaml.load(f) + with (folder / "project2" / "conda-lock.yml").open() as f: + lock2 = yaml.load(f) + + assert [p["name"] for p in lock1["package"] if p["platform"] == "osx-arm64"] == [ + "bzip2", + "python_abi", + "tzdata", + ] + assert [p["name"] for p in lock2["package"] if p["platform"] == "osx-arm64"] == [ + "python_abi", + "tzdata", + ] diff --git a/unidep/_cli.py b/unidep/_cli.py index 0f3bcdc1..5bf17008 100755 --- a/unidep/_cli.py +++ b/unidep/_cli.py @@ -985,7 +985,7 @@ def _install_command( # noqa: C901, PLR0912, PLR0915 print("๐Ÿ”ฎ Installing conda dependencies with `pixi`") generate_pixi_toml( resolved, - name=None, + project_name=None, channels=requirements.channels, platforms=platforms, output_file="pixi.toml", diff --git a/unidep/_pixi.py b/unidep/_pixi.py index 4241a0c7..84ae34e8 100644 --- a/unidep/_pixi.py +++ b/unidep/_pixi.py @@ -22,7 +22,7 @@ def generate_pixi_toml( resolved_dependencies: dict[str, dict[Platform | None, dict[CondaPip, Spec]]], - project_name: str, + project_name: str | None, channels: list[str], platforms: list[Platform], output_file: str | Path | None = "pixi.toml", @@ -37,7 +37,7 @@ def generate_pixi_toml( def _initialize_pixi_data( channels: list[str], platforms: list[Platform], - project_name: str, + project_name: str | None, ) -> dict[str, dict[str, Any]]: pixi_data: dict[str, dict[str, Any]] = {} if not platforms: diff --git a/unidep/_pixi_lock.py b/unidep/_pixi_lock.py index 9ac5ea04..bdd79559 100644 --- 
a/unidep/_pixi_lock.py +++ b/unidep/_pixi_lock.py @@ -49,7 +49,7 @@ def _run_pixi_lock( print(f"๐Ÿ”’ Locking dependencies with `{' '.join(cmd)}`\n") try: with change_directory(pixi_toml.parent): - subprocess.run(cmd, check=True, text=True, capture_output=True) + subprocess.run(cmd, check=True, text=True) # Optionally process the lock file if needed add_comment_to_file( pixi_lock_output, @@ -187,6 +187,14 @@ def _pixi_lock_subpackage( " in global lock file.", ) + urls = defaultdict(list) + packages_list = [] + for platform, entries in locked_entries.items(): + for entry in entries: + for url in entry.values(): + urls[platform].append(url) + packages_list.append(entry) + # Generate subproject pixi.lock pixi_lock_output = file.parent / "pixi.lock" sub_lock_data = { @@ -199,6 +207,7 @@ def _pixi_lock_subpackage( "packages": dict(locked_entries), }, }, + "packages": dict(locked_entries), } if yaml is None: @@ -235,8 +244,10 @@ def _check_consistent_lock_files( environments = global_data.get("environments", {}) for env_data in environments.values(): for packages_list in env_data.get("packages", {}).values(): - print(f"{packages_list=}") - global_packages.update(packages_list) + for pkg_entry in packages_list: + # pkg_entry is a dict like {'conda': 'url'} + for manager, url in pkg_entry.items(): + global_packages.add(url) mismatches = [] for lock_file in sub_lock_files: @@ -247,7 +258,9 @@ def _check_consistent_lock_files( environments = data.get("environments", {}) for env_data in environments.values(): for packages_list in env_data.get("packages", {}).values(): - sub_packages.update(packages_list) + for pkg_entry in packages_list: + for manager, url in pkg_entry.items(): + sub_packages.add(url) if not sub_packages.issubset(global_packages): missing = sub_packages - global_packages @@ -298,7 +311,7 @@ def pixi_lock_command( global_lock_data = yaml.load(fp) lock_spec = _parse_pixi_lock_packages(global_lock_data) - + print(f"{lock_spec=}") sub_lock_files = [] found_files = find_requirements_files(directory, depth) for file in found_files: From 7cd96082bab4dadda5efcc83c10c525da1dd6300 Mon Sep 17 00:00:00 2001 From: Bas Nijholt Date: Fri, 4 Oct 2024 16:06:58 -0700 Subject: [PATCH 11/39] p. 
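
Renames `test_conda_lock_command` to `test_pixi_lock_command` and fixes
the mock target: `_run_pixi_lock` is defined in `unidep._pixi_lock`, not
`unidep._conda_lock`, so the old patch path pointed at the wrong module.
A minimal sketch of the corrected pattern:

    from unittest.mock import patch

    # Patch _run_pixi_lock in the module that defines it, so the test
    # exercises pixi_lock_command() without running the real `pixi`:
    with patch("unidep._pixi_lock._run_pixi_lock", return_value=None):
        ...
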
--- tests/test_pixi_lock.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/test_pixi_lock.py b/tests/test_pixi_lock.py index 9ae4ecd4..768ec7b3 100644 --- a/tests/test_pixi_lock.py +++ b/tests/test_pixi_lock.py @@ -11,10 +11,10 @@ from unidep._pixi_lock import pixi_lock_command -def test_conda_lock_command(tmp_path: Path) -> None: +def test_pixi_lock_command(tmp_path: Path) -> None: folder = tmp_path / "simple_monorepo" shutil.copytree(Path(__file__).parent / "simple_monorepo", folder) - with patch("unidep._conda_lock._run_pixi_lock", return_value=None): + with patch("unidep._pixi_lock._run_pixi_lock", return_value=None): pixi_lock_command( depth=1, directory=folder, From ab4821b88061fe29e70029d0a2ca35ee16a1bcee Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" Date: Fri, 4 Oct 2024 23:11:05 +0000 Subject: [PATCH 12/39] Update files from markdown-code-runner --- README.md | 9 +++++++-- example/README.md | 12 ++++++------ example/environment.yaml | 5 +++-- 3 files changed, 16 insertions(+), 10 deletions(-) diff --git a/README.md b/README.md index 78365cb7..5c341bab 100644 --- a/README.md +++ b/README.md @@ -380,13 +380,13 @@ See [example](example/) for more information or check the output of `unidep -h` ```bash usage: unidep [-h] - {merge,install,install-all,conda-lock,pip-compile,pip,conda,version} + {merge,install,install-all,conda-lock,pixi-lock,pip-compile,pip,conda,version} ... Unified Conda and Pip requirements management. positional arguments: - {merge,install,install-all,conda-lock,pip-compile,pip,conda,version} + {merge,install,install-all,conda-lock,pixi-lock,pip-compile,pip,conda,version} Subcommands merge Combine multiple (or a single) `requirements.yaml` or `pyproject.toml` files into a single Conda installable @@ -410,6 +410,11 @@ positional arguments: lock.yml` files for each `requirements.yaml` or `pyproject.toml` file consistent with the global lock file. + pixi-lock Generate a global `pixi.lock` file for a collection of + `requirements.yaml` or `pyproject.toml` files. + Additionally, create individual `pixi.lock` files for + each `requirements.yaml` or `pyproject.toml` file + consistent with the global lock file. pip-compile Generate a fully pinned `requirements.txt` file from one or more `requirements.yaml` or `pyproject.toml` files using `pip-compile` from `pip-tools`. 
This diff --git a/example/README.md b/example/README.md index 6d2547c5..790c0eb5 100644 --- a/example/README.md +++ b/example/README.md @@ -61,12 +61,12 @@ This would be the same as running `unidep merge --name myenv --verbose`: ๐Ÿ“„ Parsing `hatch2_project/pyproject.toml` ๐Ÿ“„ Parsing `hatch_project/requirements.yaml` ๐Ÿ“„ Parsing `pyproject_toml_project/pyproject.toml` -๐Ÿ“„ Parsing `../hatch_project[test]` from `local_dependencies` -๐Ÿ“„ Parsing `pyproject_toml_project/../hatch_project/requirements.yaml[test]` -๐Ÿ“„ Moving `test` optional dependencies to main dependencies for `pyproject_toml_project/../hatch_project/requirements.yaml[test]` ๐Ÿ“„ Parsing `setup_py_project/requirements.yaml` ๐Ÿ“„ Parsing `../setuptools_project` from `local_dependencies` ๐Ÿ“„ Parsing `setup_py_project/../setuptools_project/requirements.yaml` +๐Ÿ“„ Parsing `../hatch_project[test]` from `local_dependencies` +๐Ÿ“„ Parsing `setup_py_project/../setuptools_project/../hatch_project/requirements.yaml[test]` +๐Ÿ“„ Moving `test` optional dependencies to main dependencies for `setup_py_project/../setuptools_project/../hatch_project/requirements.yaml[test]` ๐Ÿ“„ Parsing `setuptools_project/requirements.yaml` ๐Ÿ“ Generating environment file at `environment.yaml` ๐Ÿ“ Environment file generated successfully. @@ -155,11 +155,11 @@ unidep install-all -e ```bash $ unidep install-all -e --dry-run -๐Ÿ“ฆ Installing conda dependencies with `conda install --yes --override-channels --channel conda-forge adaptive-scheduler numpy">=1.21" hpc05 pandas">=1,<3" pexpect adaptive">=0.15.0, <2.0.0" pfapack packaging pytest pytest-cov` +๐Ÿ“ฆ Installing conda dependencies with `conda install --yes --override-channels --channel conda-forge adaptive-scheduler numpy">=1.21" hpc05 pandas">=1,<3" pexpect libzlib adaptive">=0.15.0, <2.0.0" pfapack packaging pytest pytest-cov` -๐Ÿ“ฆ Installing pip dependencies with `/opt/hostedtoolcache/Python/3.12.6/x64/bin/python -m pip install unidep markdown-code-runner numthreads yaml2bib rsync-time-machine slurm-usage pyyaml aiokef` +๐Ÿ“ฆ Installing pip dependencies with `/opt/hostedtoolcache/Python/3.12.6/x64/bin/python -m pip install unidep yaml2bib rsync-time-machine slurm-usage pyyaml aiokef markdown-code-runner numthreads` -๐Ÿ“ Found local dependencies: {'pyproject_toml_project': ['hatch_project'], 'setup_py_project': ['hatch_project', 'setuptools_project'], 'setuptools_project': ['hatch_project']} +๐Ÿ“ Found local dependencies: {'setup_py_project': ['hatch_project', 'setuptools_project'], 'setuptools_project': ['hatch_project']} ๐Ÿ“ฆ Installing project with `/opt/hostedtoolcache/Python/3.12.6/x64/bin/python -m pip install --no-dependencies -e ./hatch2_project -e ./hatch_project -e ./pyproject_toml_project -e ./setup_py_project -e ./setuptools_project` diff --git a/example/environment.yaml b/example/environment.yaml index 185d8b90..ff4c4307 100644 --- a/example/environment.yaml +++ b/example/environment.yaml @@ -12,6 +12,7 @@ dependencies: - pandas >=1,<3 - sel(linux): pexpect - sel(osx): pexpect + - libzlib - sel(linux): adaptive >=0.15.0, <2.0.0 - sel(linux): pfapack - packaging @@ -19,14 +20,14 @@ dependencies: - pytest-cov - pip: - unidep - - markdown-code-runner - - numthreads - yaml2bib; sys_platform == 'linux' and platform_machine == 'x86_64' - rsync-time-machine - slurm-usage - fileup; sys_platform == 'darwin' - pyyaml - aiokef + - markdown-code-runner + - numthreads platforms: - linux-64 - osx-64 From 37f138cbda5497eeb31f0ca8f6f7e2eaf38a6b5c Mon Sep 17 00:00:00 2001 From: Bas Nijholt Date: 
Fri, 4 Oct 2024 16:12:40 -0700 Subject: [PATCH 13/39] test --- tests/test_pixi_lock.py | 41 ++++++++++++++++++++++++++++++++--------- 1 file changed, 32 insertions(+), 9 deletions(-) diff --git a/tests/test_pixi_lock.py b/tests/test_pixi_lock.py index 768ec7b3..4a6830b8 100644 --- a/tests/test_pixi_lock.py +++ b/tests/test_pixi_lock.py @@ -29,17 +29,40 @@ def test_pixi_lock_command(tmp_path: Path) -> None: extra_flags=["--", "--micromamba"], ) with YAML(typ="safe") as yaml: - with (folder / "project1" / "conda-lock.yml").open() as f: + with (folder / "project1" / "pixi.lock").open() as f: lock1 = yaml.load(f) - with (folder / "project2" / "conda-lock.yml").open() as f: + with (folder / "project2" / "pixi.lock").open() as f: lock2 = yaml.load(f) - assert [p["name"] for p in lock1["package"] if p["platform"] == "osx-arm64"] == [ - "bzip2", - "python_abi", - "tzdata", + assert lock1["environments"]["default"]["packages"]["osx-64"] == [ + { + "conda": "https://conda.anaconda.org/conda-forge/osx-64/bzip2-1.0.8-hfdf4475_7.conda", + }, + { + "conda": "https://conda.anaconda.org/conda-forge/osx-64/python_abi-3.13-5_cp313t.conda", + }, ] - assert [p["name"] for p in lock2["package"] if p["platform"] == "osx-arm64"] == [ - "python_abi", - "tzdata", + assert lock1["environments"]["default"]["packages"]["osx-arm64"] == [ + { + "conda": "https://conda.anaconda.org/conda-forge/osx-arm64/bzip2-1.0.8-h99b78c6_7.conda", + }, + { + "conda": "https://conda.anaconda.org/conda-forge/osx-arm64/python_abi-3.13-5_cp313t.conda", + }, + { + "conda": "https://conda.anaconda.org/conda-forge/noarch/tzdata-2024b-hc8b5060_0.conda", + }, + ] + assert lock2["environments"]["default"]["packages"]["osx-64"] == [ + { + "conda": "https://conda.anaconda.org/conda-forge/osx-64/python_abi-3.13-5_cp313t.conda", + }, + ] + assert lock2["environments"]["default"]["packages"]["osx-arm64"] == [ + { + "conda": "https://conda.anaconda.org/conda-forge/osx-arm64/python_abi-3.13-5_cp313t.conda", + }, + { + "conda": "https://conda.anaconda.org/conda-forge/noarch/tzdata-2024b-hc8b5060_0.conda", + }, ] From e966f0b0f5bbf441c1a1d4301d98c420787ce7d5 Mon Sep 17 00:00:00 2001 From: Bas Nijholt Date: Fri, 4 Oct 2024 16:46:48 -0700 Subject: [PATCH 14/39] wip --- .../simple_monorepo/common-requirements.yaml | 3 + tests/simple_monorepo/pixi.lock | 2 +- tests/simple_monorepo/project1/pixi.lock | 21 ++++- tests/simple_monorepo/project2/pixi.lock | 17 ++++- tests/test_pixi_lock.py | 71 ++++++++--------- unidep/_pixi_lock.py | 76 ++++++++++++++----- 6 files changed, 129 insertions(+), 61 deletions(-) diff --git a/tests/simple_monorepo/common-requirements.yaml b/tests/simple_monorepo/common-requirements.yaml index 35bc8687..aec7b789 100644 --- a/tests/simple_monorepo/common-requirements.yaml +++ b/tests/simple_monorepo/common-requirements.yaml @@ -5,3 +5,6 @@ channels: - conda-forge dependencies: - conda: python_abi +platforms: + - osx-64 + - osx-arm64 diff --git a/tests/simple_monorepo/pixi.lock b/tests/simple_monorepo/pixi.lock index d2cb5ac8..8d532893 100644 --- a/tests/simple_monorepo/pixi.lock +++ b/tests/simple_monorepo/pixi.lock @@ -1,6 +1,6 @@ # This file is created and managed by `unidep` 0.63.2. 
# For details see https://github.com/basnijholt/unidep -# File generated with: `unidep pixi-lock` +# File generated with: `unidep pixi-lock -d tests/simple_monorepo -p osx-64 -p osx-arm64` # # This environment can be installed with # `pixi install` diff --git a/tests/simple_monorepo/project1/pixi.lock b/tests/simple_monorepo/project1/pixi.lock index 3bad4815..189e0976 100644 --- a/tests/simple_monorepo/project1/pixi.lock +++ b/tests/simple_monorepo/project1/pixi.lock @@ -1,15 +1,30 @@ # This file is created and managed by `unidep` 0.63.2. # For details see https://github.com/basnijholt/unidep -# File generated with: `unidep pixi-lock` +# File generated with: `unidep pixi-lock -d tests/simple_monorepo -p osx-64 -p osx-arm64` # # This environment can be installed with # `pixi install` # This file is a `pixi.lock` file generated via `unidep`. -# For details see https://github.com/pyx/conda-pix +# For details see https://pixi.sh/ environments: default: channels: - url: https://conda.anaconda.org/conda-forge/ -packages: {} + packages: + osx-64: + - conda: https://conda.anaconda.org/conda-forge/osx-64/bzip2-1.0.8-hfdf4475_7.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/python_abi-3.13-5_cp313t.conda + osx-arm64: + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/bzip2-1.0.8-h99b78c6_7.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/tzdata-2024b-hc8b5060_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/python_abi-3.13-5_cp313t.conda +packages: + osx-64: + - conda: https://conda.anaconda.org/conda-forge/osx-64/bzip2-1.0.8-hfdf4475_7.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/python_abi-3.13-5_cp313t.conda + osx-arm64: + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/bzip2-1.0.8-h99b78c6_7.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/tzdata-2024b-hc8b5060_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/python_abi-3.13-5_cp313t.conda version: 5 diff --git a/tests/simple_monorepo/project2/pixi.lock b/tests/simple_monorepo/project2/pixi.lock index 3bad4815..8d00a60d 100644 --- a/tests/simple_monorepo/project2/pixi.lock +++ b/tests/simple_monorepo/project2/pixi.lock @@ -1,15 +1,26 @@ # This file is created and managed by `unidep` 0.63.2. # For details see https://github.com/basnijholt/unidep -# File generated with: `unidep pixi-lock` +# File generated with: `unidep pixi-lock -d tests/simple_monorepo -p osx-64 -p osx-arm64` # # This environment can be installed with # `pixi install` # This file is a `pixi.lock` file generated via `unidep`. 
-# For details see https://github.com/pyx/conda-pix +# For details see https://pixi.sh/ environments: default: channels: - url: https://conda.anaconda.org/conda-forge/ -packages: {} + packages: + osx-64: + - conda: https://conda.anaconda.org/conda-forge/osx-64/python_abi-3.13-5_cp313t.conda + osx-arm64: + - conda: https://conda.anaconda.org/conda-forge/noarch/tzdata-2024b-hc8b5060_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/python_abi-3.13-5_cp313t.conda +packages: + osx-64: + - conda: https://conda.anaconda.org/conda-forge/osx-64/python_abi-3.13-5_cp313t.conda + osx-arm64: + - conda: https://conda.anaconda.org/conda-forge/noarch/tzdata-2024b-hc8b5060_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/python_abi-3.13-5_cp313t.conda version: 5 diff --git a/tests/test_pixi_lock.py b/tests/test_pixi_lock.py index 4a6830b8..c4d3a634 100644 --- a/tests/test_pixi_lock.py +++ b/tests/test_pixi_lock.py @@ -19,7 +19,7 @@ def test_pixi_lock_command(tmp_path: Path) -> None: depth=1, directory=folder, files=None, - platforms=["linux-64", "osx-arm64"], + platforms=["osx-64", "osx-arm64"], verbose=True, only_global=False, check_input_hash=True, @@ -33,36 +33,39 @@ def test_pixi_lock_command(tmp_path: Path) -> None: lock1 = yaml.load(f) with (folder / "project2" / "pixi.lock").open() as f: lock2 = yaml.load(f) - - assert lock1["environments"]["default"]["packages"]["osx-64"] == [ - { - "conda": "https://conda.anaconda.org/conda-forge/osx-64/bzip2-1.0.8-hfdf4475_7.conda", - }, - { - "conda": "https://conda.anaconda.org/conda-forge/osx-64/python_abi-3.13-5_cp313t.conda", - }, - ] - assert lock1["environments"]["default"]["packages"]["osx-arm64"] == [ - { - "conda": "https://conda.anaconda.org/conda-forge/osx-arm64/bzip2-1.0.8-h99b78c6_7.conda", - }, - { - "conda": "https://conda.anaconda.org/conda-forge/osx-arm64/python_abi-3.13-5_cp313t.conda", - }, - { - "conda": "https://conda.anaconda.org/conda-forge/noarch/tzdata-2024b-hc8b5060_0.conda", - }, - ] - assert lock2["environments"]["default"]["packages"]["osx-64"] == [ - { - "conda": "https://conda.anaconda.org/conda-forge/osx-64/python_abi-3.13-5_cp313t.conda", - }, - ] - assert lock2["environments"]["default"]["packages"]["osx-arm64"] == [ - { - "conda": "https://conda.anaconda.org/conda-forge/osx-arm64/python_abi-3.13-5_cp313t.conda", - }, - { - "conda": "https://conda.anaconda.org/conda-forge/noarch/tzdata-2024b-hc8b5060_0.conda", - }, - ] + assert lock1["environments"]["default"]["packages"] == { + "osx-64": [ + { + "conda": "https://conda.anaconda.org/conda-forge/osx-64/bzip2-1.0.8-hfdf4475_7.conda", + }, + { + "conda": "https://conda.anaconda.org/conda-forge/osx-64/python_abi-3.13-5_cp313t.conda", + }, + ], + "osx-arm64": [ + { + "conda": "https://conda.anaconda.org/conda-forge/osx-arm64/bzip2-1.0.8-h99b78c6_7.conda", + }, + { + "conda": "https://conda.anaconda.org/conda-forge/noarch/tzdata-2024b-hc8b5060_0.conda", + }, + { + "conda": "https://conda.anaconda.org/conda-forge/osx-arm64/python_abi-3.13-5_cp313t.conda", + }, + ], + } + assert lock2["environments"]["default"]["packages"] == { + "osx-64": [ + { + "conda": "https://conda.anaconda.org/conda-forge/osx-64/python_abi-3.13-5_cp313t.conda", + }, + ], + "osx-arm64": [ + { + "conda": "https://conda.anaconda.org/conda-forge/noarch/tzdata-2024b-hc8b5060_0.conda", + }, + { + "conda": "https://conda.anaconda.org/conda-forge/osx-arm64/python_abi-3.13-5_cp313t.conda", + }, + ], + } diff --git a/unidep/_pixi_lock.py b/unidep/_pixi_lock.py index bdd79559..ca9feea0 
100644 --- a/unidep/_pixi_lock.py +++ b/unidep/_pixi_lock.py @@ -119,12 +119,14 @@ class PixiLockSpec(NamedTuple): """A specification of the pixi lock file.""" packages: dict[tuple[Platform, str], dict[str, Any]] + dependencies: dict[tuple[Platform, str], set[str]] def _parse_pixi_lock_packages( pixi_lock_data: dict[str, Any], ) -> PixiLockSpec: packages = {} + dependencies = {} environments = pixi_lock_data.get("environments", {}) for env_name, env_data in environments.items(): channels = env_data.get("channels", []) @@ -153,7 +155,39 @@ def _parse_pixi_lock_packages( "url": url, "version": package_version, } - return PixiLockSpec(packages=packages) + + # Download and parse dependencies + pkg_dependencies = _download_and_get_dependencies(url) + dependencies[key] = pkg_dependencies + return PixiLockSpec(packages=packages, dependencies=dependencies) + + +def _download_and_get_dependencies(url: str) -> set[str]: + import json + import tarfile + import tempfile + import urllib.request + from pathlib import Path + + with tempfile.TemporaryDirectory() as temp_dir: + temp_path = Path(temp_dir) + file_name = url.split("/")[-1] + file_path = temp_path / file_name + urllib.request.urlretrieve(url, str(file_path)) + + if file_name.endswith(".tar.bz2"): + with tarfile.open(file_path, "r:bz2") as tar: + try: + index_file = tar.extractfile("info/index.json") + index_json = json.load(index_file) + return set(index_json.get("depends", [])) + except KeyError: + return set() + elif file_name.endswith(".conda"): + # Handle .conda packages (requires conda_package_handling) + # Similar to the conda-lock implementation + pass + return set() def _pixi_lock_subpackage( @@ -165,6 +199,22 @@ def _pixi_lock_subpackage( ) -> Path: requirements = parse_requirements(file) locked_entries: dict[Platform, list[dict]] = defaultdict(list) + locked_keys: set[tuple[Platform, str]] = set() + missing_keys: set[tuple[Platform, str]] = set() + + def add_package_with_dependencies(platform: Platform, name: str): + key = (platform, name) + if key in locked_keys: + return + if key not in lock_spec.packages: + missing_keys.add(key) + return + pkg_entry = lock_spec.packages[key]["package"] + locked_entries[platform].append(pkg_entry) + locked_keys.add(key) + for dep in lock_spec.dependencies.get(key, set()): + dep_name = dep.split(" ")[0] # Remove version specifiers + add_package_with_dependencies(platform, dep_name) for name, specs in requirements.requirements.items(): if name.startswith("__"): @@ -177,23 +227,10 @@ def _pixi_lock_subpackage( _platforms = [p for p in _platforms if p in platforms] for _platform in _platforms: - key = (_platform, name) - if key in lock_spec.packages: - pkg_entry = lock_spec.packages[key]["package"] - locked_entries[_platform].append(pkg_entry) - else: - print( - f"โš ๏ธ Package {name} for platform {_platform} not found" - " in global lock file.", - ) - - urls = defaultdict(list) - packages_list = [] - for platform, entries in locked_entries.items(): - for entry in entries: - for url in entry.values(): - urls[platform].append(url) - packages_list.append(entry) + add_package_with_dependencies(_platform, name) + + if missing_keys: + print(f"โš ๏ธ Missing packages: {missing_keys}") # Generate subproject pixi.lock pixi_lock_output = file.parent / "pixi.lock" @@ -226,7 +263,7 @@ def _pixi_lock_subpackage( "# This environment can be installed with", "# `pixi install`", "# This file is a `pixi.lock` file generated via `unidep`.", - "# For details see https://github.com/pyx/conda-pix", + "# For details see 
https://pixi.sh/", ], ) return pixi_lock_output @@ -311,7 +348,6 @@ def pixi_lock_command( global_lock_data = yaml.load(fp) lock_spec = _parse_pixi_lock_packages(global_lock_data) - print(f"{lock_spec=}") sub_lock_files = [] found_files = find_requirements_files(directory, depth) for file in found_files: From 189584df4fe85cdd9695912ba67f810fa82f570a Mon Sep 17 00:00:00 2001 From: Bas Nijholt Date: Fri, 4 Oct 2024 17:08:36 -0700 Subject: [PATCH 15/39] wip --- unidep/_pixi_lock.py | 137 ++++++++++++++++++++++++------------------- 1 file changed, 78 insertions(+), 59 deletions(-) diff --git a/unidep/_pixi_lock.py b/unidep/_pixi_lock.py index ca9feea0..9cdc6500 100644 --- a/unidep/_pixi_lock.py +++ b/unidep/_pixi_lock.py @@ -125,69 +125,83 @@ class PixiLockSpec(NamedTuple): def _parse_pixi_lock_packages( pixi_lock_data: dict[str, Any], ) -> PixiLockSpec: - packages = {} - dependencies = {} + # Build a mapping from URL to package metadata + url_to_package = {} + for pkg in pixi_lock_data.get("packages", []): + url = pkg.get("url") + if url: + url_to_package[url] = pkg + + packages: dict[tuple[str, Platform, str], dict[str, Any]] = {} + dependencies_raw: dict[Platform, dict[str, set[str]]] = defaultdict( + lambda: defaultdict(set), + ) + environments = pixi_lock_data.get("environments", {}) for env_name, env_data in environments.items(): channels = env_data.get("channels", []) for platform, packages_list in env_data.get("packages", {}).items(): for pkg_entry in packages_list: for manager, url in pkg_entry.items(): - # Extract package name from URL - package_filename = url.split("/")[-1] - # Remove the extension - if package_filename.endswith((".conda", ".tar.bz2")): - package_filename = package_filename.rsplit(".", 1)[0] - # For conda packages, format is name-version-build - parts = package_filename.split("-") - if len(parts) >= 3: - package_name = "-".join(parts[:-2]) - package_version = parts[-2] - else: - package_name = parts[0] - package_version = parts[1] if len(parts) > 1 else "" - key = (platform, package_name) + # manager is expected to be "conda" + pkg_metadata = url_to_package.get(url) + if not pkg_metadata: + print(f"โš ๏ธ Missing metadata for package at URL {url}") + continue + package_name = pkg_metadata.get("name") + key = (manager, platform, package_name) + if key in packages: + continue # avoid duplicates packages[key] = { - "environment": env_name, - "channels": channels, - "package": pkg_entry, "manager": manager, + "platform": platform, + "name": package_name, + "package_metadata": pkg_metadata, + "channels": channels, "url": url, - "version": package_version, } + # Extract dependencies + depends = pkg_metadata.get("depends", []) + dependencies_raw[platform][package_name].update( + dep.split(" ")[0] for dep in depends + ) + + # Now resolve dependencies recursively, similar to conda-lock + resolved_dependencies: dict[Platform, dict[str, set[str]]] = {} + for platform, pkgs in dependencies_raw.items(): + resolved_pkgs: dict[str, set[str]] = {} + for package in pkgs: + _recurse_pixi(package, resolved_pkgs, pkgs, set()) + resolved_dependencies[platform] = resolved_pkgs + + # Flatten the dependencies dict to match the packages keys + dependencies_flat = { + (manager, platform, name): deps + for (manager, platform, name) in packages + for name_, deps in resolved_dependencies[platform].items() + if name_ == packages[(manager, platform, name)]["name"] + } - # Download and parse dependencies - pkg_dependencies = _download_and_get_dependencies(url) - dependencies[key] = 
pkg_dependencies - return PixiLockSpec(packages=packages, dependencies=dependencies) - + return PixiLockSpec(packages, dependencies_flat) -def _download_and_get_dependencies(url: str) -> set[str]: - import json - import tarfile - import tempfile - import urllib.request - from pathlib import Path - with tempfile.TemporaryDirectory() as temp_dir: - temp_path = Path(temp_dir) - file_name = url.split("/")[-1] - file_path = temp_path / file_name - urllib.request.urlretrieve(url, str(file_path)) - - if file_name.endswith(".tar.bz2"): - with tarfile.open(file_path, "r:bz2") as tar: - try: - index_file = tar.extractfile("info/index.json") - index_json = json.load(index_file) - return set(index_json.get("depends", [])) - except KeyError: - return set() - elif file_name.endswith(".conda"): - # Handle .conda packages (requires conda_package_handling) - # Similar to the conda-lock implementation - pass - return set() +def _recurse_pixi( + package_name: str, + resolved: dict[str, set[str]], + dependencies: dict[str, set[str]], + seen: set[str], +) -> set[str]: + if package_name in resolved: + return resolved[package_name] + if package_name in seen: + return set() + seen.add(package_name) + all_deps = set(dependencies.get(package_name, [])) + for dep in dependencies.get(package_name, []): + all_deps.update(_recurse_pixi(dep, resolved, dependencies, seen)) + resolved[package_name] = all_deps + seen.remove(package_name) + return all_deps def _pixi_lock_subpackage( @@ -195,25 +209,30 @@ def _pixi_lock_subpackage( file: Path, lock_spec: PixiLockSpec, platforms: list[Platform], - yaml: YAML | None, # Passing this to preserve order! + yaml: YAML | None, ) -> Path: requirements = parse_requirements(file) locked_entries: dict[Platform, list[dict]] = defaultdict(list) - locked_keys: set[tuple[Platform, str]] = set() - missing_keys: set[tuple[Platform, str]] = set() + locked_packages: list[dict] = [] + locked_keys: set[tuple[str, Platform, str]] = set() + missing_keys: set[tuple[str, Platform, str]] = set() def add_package_with_dependencies(platform: Platform, name: str): - key = (platform, name) + key = ("conda", platform, name) if key in locked_keys: return if key not in lock_spec.packages: missing_keys.add(key) return - pkg_entry = lock_spec.packages[key]["package"] - locked_entries[platform].append(pkg_entry) + pkg_info = lock_spec.packages[key] + # Add to locked_entries + locked_entries[platform].append({pkg_info["manager"]: pkg_info["url"]}) + # Add to locked_packages + locked_packages.append(pkg_info["package_metadata"]) locked_keys.add(key) - for dep in lock_spec.dependencies.get(key, set()): - dep_name = dep.split(" ")[0] # Remove version specifiers + # Recursively add dependencies + dependencies = lock_spec.dependencies.get(key, set()) + for dep_name in dependencies: add_package_with_dependencies(platform, dep_name) for name, specs in requirements.requirements.items(): @@ -244,7 +263,7 @@ def add_package_with_dependencies(platform: Platform, name: str): "packages": dict(locked_entries), }, }, - "packages": dict(locked_entries), + "packages": locked_packages, } if yaml is None: From d966163b3ed58a1e03705ebea2feba5461349754 Mon Sep 17 00:00:00 2001 From: Bas Nijholt Date: Fri, 4 Oct 2024 17:10:41 -0700 Subject: [PATCH 16/39] . 
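Regenerate the sub-project lock files with the corrected layout: the
`environments.*.packages` tables keep the per-platform URL references,
while the top-level `packages` section now inlines the full repodata
records (build, md5, sha256, size, url, ...), matching what the global
`pixi.lock` already contains.

For orientation, a minimal sketch of how the two sections relate — the
join key is the package URL. This is illustrative only; the helper name
and the direct `ruamel.yaml` use are assumptions, not unidep API:

```python
from ruamel.yaml import YAML


def records_for_platform(lock_path: str, platform: str) -> list[dict]:
    """Resolve one platform's URL references to full metadata records."""
    yaml = YAML(typ="safe")
    with open(lock_path) as f:
        lock = yaml.load(f)
    # Top-level `packages` is a list of records, each carrying its `url`.
    url_to_record = {p["url"]: p for p in lock.get("packages", [])}
    env = lock["environments"]["default"]
    return [
        url_to_record[url]
        for entry in env["packages"].get(platform, [])  # e.g. {"conda": "<url>"}
        for url in entry.values()
    ]
```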
--- tests/simple_monorepo/project1/pixi.lock | 79 +++++++++++++++++++++--- tests/simple_monorepo/project2/pixi.lock | 47 ++++++++++++-- 2 files changed, 114 insertions(+), 12 deletions(-) diff --git a/tests/simple_monorepo/project1/pixi.lock b/tests/simple_monorepo/project1/pixi.lock index 189e0976..7256091e 100644 --- a/tests/simple_monorepo/project1/pixi.lock +++ b/tests/simple_monorepo/project1/pixi.lock @@ -20,11 +20,76 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/tzdata-2024b-hc8b5060_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/python_abi-3.13-5_cp313t.conda packages: - osx-64: - - conda: https://conda.anaconda.org/conda-forge/osx-64/bzip2-1.0.8-hfdf4475_7.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/python_abi-3.13-5_cp313t.conda - osx-arm64: - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/bzip2-1.0.8-h99b78c6_7.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/tzdata-2024b-hc8b5060_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/python_abi-3.13-5_cp313t.conda +- build: hfdf4475_7 + build_number: 7 + depends: + - __osx >=10.13 + kind: conda + license: bzip2-1.0.6 + license_family: BSD + md5: 7ed4301d437b59045be7e051a0308211 + name: bzip2 + sha256: cad153608b81fb24fc8c509357daa9ae4e49dfc535b2cb49b91e23dbd68fc3c5 + size: 134188 + subdir: osx-64 + timestamp: 1720974491916 + url: https://conda.anaconda.org/conda-forge/osx-64/bzip2-1.0.8-hfdf4475_7.conda + version: 1.0.8 +- build: h99b78c6_7 + build_number: 7 + depends: + - __osx >=11.0 + kind: conda + license: bzip2-1.0.6 + license_family: BSD + md5: fc6948412dbbbe9a4c9ddbbcfe0a79ab + name: bzip2 + sha256: adfa71f158cbd872a36394c56c3568e6034aa55c623634b37a4836bd036e6b91 + size: 122909 + subdir: osx-arm64 + timestamp: 1720974522888 + url: https://conda.anaconda.org/conda-forge/osx-arm64/bzip2-1.0.8-h99b78c6_7.conda + version: 1.0.8 +- build: hc8b5060_0 + kind: conda + license: LicenseRef-Public-Domain + md5: 8ac3367aafb1cc0a068483c580af8015 + name: tzdata + noarch: generic + sha256: 4fde5c3008bf5d2db82f2b50204464314cc3c91c1d953652f7bd01d9e52aefdf + size: 122354 + subdir: noarch + timestamp: 1728047496079 + url: https://conda.anaconda.org/conda-forge/noarch/tzdata-2024b-hc8b5060_0.conda + version: 2024b +- build: 5_cp313t + build_number: 5 + constrains: + - python 3.13.* *_cp313t + kind: conda + license: BSD-3-Clause + license_family: BSD + md5: 32ba8fc57ccb0b48dd6006974f65c525 + name: python_abi + sha256: a96553de64be6441400e88c2c6ad7123d91cbcea4898b5966a653163f30d9f55 + size: 6300 + subdir: osx-64 + timestamp: 1723823108577 + url: https://conda.anaconda.org/conda-forge/osx-64/python_abi-3.13-5_cp313t.conda + version: '3.13' +- build: 5_cp313t + build_number: 5 + constrains: + - python 3.13.* *_cp313t + kind: conda + license: BSD-3-Clause + license_family: BSD + md5: 24a9a05eba65586da53ad7b56a06dc02 + name: python_abi + sha256: 2165466ff175e1890b66d079d64449a1b6dd9873fb0f5e977839ccc4639b813b + size: 6317 + subdir: osx-arm64 + timestamp: 1723823118660 + url: https://conda.anaconda.org/conda-forge/osx-arm64/python_abi-3.13-5_cp313t.conda + version: '3.13' version: 5 diff --git a/tests/simple_monorepo/project2/pixi.lock b/tests/simple_monorepo/project2/pixi.lock index 8d00a60d..64346111 100644 --- a/tests/simple_monorepo/project2/pixi.lock +++ b/tests/simple_monorepo/project2/pixi.lock @@ -18,9 +18,46 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/tzdata-2024b-hc8b5060_0.conda - conda: 
https://conda.anaconda.org/conda-forge/osx-arm64/python_abi-3.13-5_cp313t.conda packages: - osx-64: - - conda: https://conda.anaconda.org/conda-forge/osx-64/python_abi-3.13-5_cp313t.conda - osx-arm64: - - conda: https://conda.anaconda.org/conda-forge/noarch/tzdata-2024b-hc8b5060_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/python_abi-3.13-5_cp313t.conda +- build: hc8b5060_0 + kind: conda + license: LicenseRef-Public-Domain + md5: 8ac3367aafb1cc0a068483c580af8015 + name: tzdata + noarch: generic + sha256: 4fde5c3008bf5d2db82f2b50204464314cc3c91c1d953652f7bd01d9e52aefdf + size: 122354 + subdir: noarch + timestamp: 1728047496079 + url: https://conda.anaconda.org/conda-forge/noarch/tzdata-2024b-hc8b5060_0.conda + version: 2024b +- build: 5_cp313t + build_number: 5 + constrains: + - python 3.13.* *_cp313t + kind: conda + license: BSD-3-Clause + license_family: BSD + md5: 32ba8fc57ccb0b48dd6006974f65c525 + name: python_abi + sha256: a96553de64be6441400e88c2c6ad7123d91cbcea4898b5966a653163f30d9f55 + size: 6300 + subdir: osx-64 + timestamp: 1723823108577 + url: https://conda.anaconda.org/conda-forge/osx-64/python_abi-3.13-5_cp313t.conda + version: '3.13' +- build: 5_cp313t + build_number: 5 + constrains: + - python 3.13.* *_cp313t + kind: conda + license: BSD-3-Clause + license_family: BSD + md5: 24a9a05eba65586da53ad7b56a06dc02 + name: python_abi + sha256: 2165466ff175e1890b66d079d64449a1b6dd9873fb0f5e977839ccc4639b813b + size: 6317 + subdir: osx-arm64 + timestamp: 1723823118660 + url: https://conda.anaconda.org/conda-forge/osx-arm64/python_abi-3.13-5_cp313t.conda + version: '3.13' version: 5 From 5be1b00cf69606566b1cbad7d66460e86c434d5f Mon Sep 17 00:00:00 2001 From: Bas Nijholt Date: Fri, 4 Oct 2024 21:57:00 -0700 Subject: [PATCH 17/39] rev --- example/pyproject_toml_project/pyproject.toml | 18 +++++++++++------- 1 file changed, 11 insertions(+), 7 deletions(-) diff --git a/example/pyproject_toml_project/pyproject.toml b/example/pyproject_toml_project/pyproject.toml index 9d4a6349..cbb3f17a 100644 --- a/example/pyproject_toml_project/pyproject.toml +++ b/example/pyproject_toml_project/pyproject.toml @@ -18,11 +18,15 @@ py-modules = ["pyproject_toml_project"] [tool.unidep] channels = ["conda-forge"] dependencies = [ - "libzlib" + "adaptive:linux64", + "pfapack:linux64", + "packaging", + { pip = "markdown-code-runner" }, + { pip = "numthreads" }, ] -# local_dependencies = [ -# "../hatch_project[test]", # Local dependency with optional dependencies -# ] -# [tool.unidep.optional_dependencies] -# dev = ["mypy", "ruff"] -# test = ["pytest"] +local_dependencies = [ + "../hatch_project[test]", # Local dependency with optional dependencies +] +[tool.unidep.optional_dependencies] +dev = ["mypy", "ruff"] +test = ["pytest"] From cc21b5d64228b9a600a66b81416650374348954c Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" Date: Sat, 5 Oct 2024 04:59:33 +0000 Subject: [PATCH 18/39] Update files from markdown-code-runner --- example/README.md | 12 ++++++------ example/environment.yaml | 5 ++--- 2 files changed, 8 insertions(+), 9 deletions(-) diff --git a/example/README.md b/example/README.md index 790c0eb5..6d2547c5 100644 --- a/example/README.md +++ b/example/README.md @@ -61,12 +61,12 @@ This would be the same as running `unidep merge --name myenv --verbose`: ๐Ÿ“„ Parsing `hatch2_project/pyproject.toml` ๐Ÿ“„ Parsing `hatch_project/requirements.yaml` ๐Ÿ“„ Parsing `pyproject_toml_project/pyproject.toml` +๐Ÿ“„ Parsing `../hatch_project[test]` from `local_dependencies` +๐Ÿ“„ Parsing 
`pyproject_toml_project/../hatch_project/requirements.yaml[test]` +๐Ÿ“„ Moving `test` optional dependencies to main dependencies for `pyproject_toml_project/../hatch_project/requirements.yaml[test]` ๐Ÿ“„ Parsing `setup_py_project/requirements.yaml` ๐Ÿ“„ Parsing `../setuptools_project` from `local_dependencies` ๐Ÿ“„ Parsing `setup_py_project/../setuptools_project/requirements.yaml` -๐Ÿ“„ Parsing `../hatch_project[test]` from `local_dependencies` -๐Ÿ“„ Parsing `setup_py_project/../setuptools_project/../hatch_project/requirements.yaml[test]` -๐Ÿ“„ Moving `test` optional dependencies to main dependencies for `setup_py_project/../setuptools_project/../hatch_project/requirements.yaml[test]` ๐Ÿ“„ Parsing `setuptools_project/requirements.yaml` ๐Ÿ“ Generating environment file at `environment.yaml` ๐Ÿ“ Environment file generated successfully. @@ -155,11 +155,11 @@ unidep install-all -e ```bash $ unidep install-all -e --dry-run -๐Ÿ“ฆ Installing conda dependencies with `conda install --yes --override-channels --channel conda-forge adaptive-scheduler numpy">=1.21" hpc05 pandas">=1,<3" pexpect libzlib adaptive">=0.15.0, <2.0.0" pfapack packaging pytest pytest-cov` +๐Ÿ“ฆ Installing conda dependencies with `conda install --yes --override-channels --channel conda-forge adaptive-scheduler numpy">=1.21" hpc05 pandas">=1,<3" pexpect adaptive">=0.15.0, <2.0.0" pfapack packaging pytest pytest-cov` -๐Ÿ“ฆ Installing pip dependencies with `/opt/hostedtoolcache/Python/3.12.6/x64/bin/python -m pip install unidep yaml2bib rsync-time-machine slurm-usage pyyaml aiokef markdown-code-runner numthreads` +๐Ÿ“ฆ Installing pip dependencies with `/opt/hostedtoolcache/Python/3.12.6/x64/bin/python -m pip install unidep markdown-code-runner numthreads yaml2bib rsync-time-machine slurm-usage pyyaml aiokef` -๐Ÿ“ Found local dependencies: {'setup_py_project': ['hatch_project', 'setuptools_project'], 'setuptools_project': ['hatch_project']} +๐Ÿ“ Found local dependencies: {'pyproject_toml_project': ['hatch_project'], 'setup_py_project': ['hatch_project', 'setuptools_project'], 'setuptools_project': ['hatch_project']} ๐Ÿ“ฆ Installing project with `/opt/hostedtoolcache/Python/3.12.6/x64/bin/python -m pip install --no-dependencies -e ./hatch2_project -e ./hatch_project -e ./pyproject_toml_project -e ./setup_py_project -e ./setuptools_project` diff --git a/example/environment.yaml b/example/environment.yaml index ff4c4307..185d8b90 100644 --- a/example/environment.yaml +++ b/example/environment.yaml @@ -12,7 +12,6 @@ dependencies: - pandas >=1,<3 - sel(linux): pexpect - sel(osx): pexpect - - libzlib - sel(linux): adaptive >=0.15.0, <2.0.0 - sel(linux): pfapack - packaging @@ -20,14 +19,14 @@ dependencies: - pytest-cov - pip: - unidep + - markdown-code-runner + - numthreads - yaml2bib; sys_platform == 'linux' and platform_machine == 'x86_64' - rsync-time-machine - slurm-usage - fileup; sys_platform == 'darwin' - pyyaml - aiokef - - markdown-code-runner - - numthreads platforms: - linux-64 - osx-64 From 280a8d2426eca862c0539d87f82a497b32eb4557 Mon Sep 17 00:00:00 2001 From: Bas Nijholt Date: Fri, 4 Oct 2024 23:16:34 -0700 Subject: [PATCH 19/39] wip --- example/hatch_project/requirements.yaml | 2 +- tests/test_pixi_lock.py | 1 - unidep/_cli.py | 6 - unidep/_pixi_lock.py | 193 ++++++++++++------------ 4 files changed, 101 insertions(+), 101 deletions(-) diff --git a/example/hatch_project/requirements.yaml b/example/hatch_project/requirements.yaml index 60d99708..b0e988df 100644 --- a/example/hatch_project/requirements.yaml +++ 
b/example/hatch_project/requirements.yaml @@ -3,7 +3,7 @@ channels: - conda-forge dependencies: - conda: adaptive-scheduler # [linux64] - - pip: unidep + - pip: unidep # [linux64] - numpy >=1.21 - hpc05 # [linux64] - pandas >=1,<3 diff --git a/tests/test_pixi_lock.py b/tests/test_pixi_lock.py index c4d3a634..f819c85f 100644 --- a/tests/test_pixi_lock.py +++ b/tests/test_pixi_lock.py @@ -22,7 +22,6 @@ def test_pixi_lock_command(tmp_path: Path) -> None: platforms=["osx-64", "osx-arm64"], verbose=True, only_global=False, - check_input_hash=True, ignore_pins=[], overwrite_pins=[], skip_dependencies=[], diff --git a/unidep/_cli.py b/unidep/_cli.py index 5bf17008..8acf8a07 100755 --- a/unidep/_cli.py +++ b/unidep/_cli.py @@ -522,11 +522,6 @@ def _parse_args() -> argparse.Namespace: # noqa: PLR0915 " in current directory). Path should be relative, e.g.," " `--lockfile ./locks/pixi.lock`.", ) - parser_pixi_lock.add_argument( - "--check-input-hash", - action="store_true", - help="Check existing input hashes in lockfiles before regenerating lock files.", - ) _add_common_args( parser_pixi_lock, { @@ -1614,7 +1609,6 @@ def main() -> None: # noqa: PLR0912 ignore_pins=args.ignore_pin, skip_dependencies=args.skip_dependency, overwrite_pins=args.overwrite_pin, - check_input_hash=args.check_input_hash, extra_flags=args.extra_flags, ) elif args.command == "pip-compile": # pragma: no cover diff --git a/unidep/_pixi_lock.py b/unidep/_pixi_lock.py index 9cdc6500..b7f3649b 100644 --- a/unidep/_pixi_lock.py +++ b/unidep/_pixi_lock.py @@ -1,5 +1,6 @@ from __future__ import annotations +import re import shutil import subprocess import sys @@ -14,38 +15,27 @@ if TYPE_CHECKING: from pathlib import Path - from unidep.platform_definitions import Platform - - if sys.version_info >= (3, 8): - pass - else: - pass + from unidep.platform_definitions import CondaPip, Platform def _run_pixi_lock( pixi_toml: Path, pixi_lock_output: Path, *, - check_input_hash: bool = False, extra_flags: list[str], ) -> None: if shutil.which("pixi") is None: msg = ( "Cannot find `pixi`." - " Please install it with `mamba install -c conda-forge pixi`." 
+ " Please install it, see the documentation" + " at https://pixi.sh/latest/" ) raise RuntimeError(msg) - if not check_input_hash and pixi_lock_output.exists(): + if pixi_lock_output.exists(): print(f"๐Ÿ—‘๏ธ Removing existing `{pixi_lock_output}`") pixi_lock_output.unlink() - cmd = [ - "pixi", - "list", - *extra_flags, - ] - if check_input_hash: - cmd.append("--check-input-hash") + cmd = ["pixi", "list", *extra_flags] print(f"๐Ÿ”’ Locking dependencies with `{' '.join(cmd)}`\n") try: with change_directory(pixi_toml.parent): @@ -76,7 +66,6 @@ def _pixi_lock_global( files: list[Path] | None, platforms: list[Platform], verbose: bool, - check_input_hash: bool, ignore_pins: list[str], skip_dependencies: list[str], overwrite_pins: list[str], @@ -108,7 +97,6 @@ def _pixi_lock_global( _run_pixi_lock( pixi_toml, pixi_lock_output, - check_input_hash=check_input_hash, extra_flags=extra_flags, ) print("โœ… Global dependencies locked successfully in `pixi.lock`.") @@ -118,71 +106,90 @@ def _pixi_lock_global( class PixiLockSpec(NamedTuple): """A specification of the pixi lock file.""" - packages: dict[tuple[Platform, str], dict[str, Any]] - dependencies: dict[tuple[Platform, str], set[str]] + packages: dict[tuple[CondaPip, Platform, str], list[dict[str, Any]]] + dependencies: dict[tuple[CondaPip, Platform, str], set[str]] + channels: list[dict[str, str]] + indexes: list[str] + + +def _filter_clean_deps(dependencies: list[str]) -> list[str]: + package_names = [] + for dep in dependencies: + # Split the dependency and the environment marker + if ";" in dep: + dep_part, marker_part = dep.split(";", 1) + marker_part = marker_part.strip() + else: + dep_part = dep + marker_part = "" + + # Skip if 'extra ==' is in the environment marker + if "extra ==" in marker_part: + continue + + # Extract the package name + dep_part = dep_part.strip() + package_name = re.split(r"[<>=!~\s]", dep_part)[0] + package_names.append(package_name) + + return package_names def _parse_pixi_lock_packages( pixi_lock_data: dict[str, Any], -) -> PixiLockSpec: +) -> dict[str, PixiLockSpec]: # Build a mapping from URL to package metadata - url_to_package = {} - for pkg in pixi_lock_data.get("packages", []): - url = pkg.get("url") - if url: - url_to_package[url] = pkg - - packages: dict[tuple[str, Platform, str], dict[str, Any]] = {} - dependencies_raw: dict[Platform, dict[str, set[str]]] = defaultdict( - lambda: defaultdict(set), - ) - + url_to_package = {pkg["url"]: pkg for pkg in pixi_lock_data.get("packages", [])} + lock_specs: dict[str, PixiLockSpec] = {} environments = pixi_lock_data.get("environments", {}) for env_name, env_data in environments.items(): - channels = env_data.get("channels", []) - for platform, packages_list in env_data.get("packages", {}).items(): - for pkg_entry in packages_list: - for manager, url in pkg_entry.items(): - # manager is expected to be "conda" - pkg_metadata = url_to_package.get(url) - if not pkg_metadata: - print(f"โš ๏ธ Missing metadata for package at URL {url}") - continue - package_name = pkg_metadata.get("name") - key = (manager, platform, package_name) - if key in packages: - continue # avoid duplicates - packages[key] = { - "manager": manager, - "platform": platform, - "name": package_name, - "package_metadata": pkg_metadata, - "channels": channels, - "url": url, - } - # Extract dependencies - depends = pkg_metadata.get("depends", []) - dependencies_raw[platform][package_name].update( - dep.split(" ")[0] for dep in depends + deps: dict[CondaPip, dict[Platform, dict[str, set[str]]]] = 
defaultdict( + lambda: defaultdict(lambda: defaultdict(set)), + ) + for platform, packages_dict in env_data.get("packages", {}).items(): + for manager_url in packages_dict: + for manager, url in manager_url.items(): + dep = url_to_package[url] + name = dep["name"] + depends = dep.get( + "depends" if manager == "conda" else "requires_dict", + [], ) + deps[manager][platform][name].update(_filter_clean_deps(depends)) + + resolved: dict[CondaPip, dict[Platform, dict[str, set[str]]]] = {} + for manager, platforms in deps.items(): + resolved_manager = resolved.setdefault(manager, {}) + for _platform, pkgs in platforms.items(): + _resolved: dict[str, set[str]] = {} + for package in list(pkgs): + _recurse_pixi(package, _resolved, pkgs, set()) + resolved_manager[_platform] = _resolved + + packages: dict[tuple[CondaPip, Platform, str], list[dict[str, Any]]] = ( + defaultdict(list) + ) + for p in pixi_lock_data.get("packages", []): + key = (p["kind"], p.get("subdir"), p["name"]) + # Could be multiple entries for the same package, + # e.g., different wheels for different OS versions + packages[key].append(p) + + # Flatten the `dependencies` dict to same format as `packages` + dependencies = { + (which, platform, name): deps + for which, platforms in resolved.items() + for platform, pkgs in platforms.items() + for name, deps in pkgs.items() + } + lock_specs[env_name] = PixiLockSpec( + packages, + dependencies, + env_data.get("channels", []), + env_data.get("indexes", []), + ) - # Now resolve dependencies recursively, similar to conda-lock - resolved_dependencies: dict[Platform, dict[str, set[str]]] = {} - for platform, pkgs in dependencies_raw.items(): - resolved_pkgs: dict[str, set[str]] = {} - for package in pkgs: - _recurse_pixi(package, resolved_pkgs, pkgs, set()) - resolved_dependencies[platform] = resolved_pkgs - - # Flatten the dependencies dict to match the packages keys - dependencies_flat = { - (manager, platform, name): deps - for (manager, platform, name) in packages - for name_, deps in resolved_dependencies[platform].items() - if name_ == packages[(manager, platform, name)]["name"] - } - - return PixiLockSpec(packages, dependencies_flat) + return lock_specs def _recurse_pixi( @@ -193,12 +200,14 @@ def _recurse_pixi( ) -> set[str]: if package_name in resolved: return resolved[package_name] - if package_name in seen: + if package_name in seen: # Circular dependency detected return set() seen.add(package_name) + all_deps = set(dependencies.get(package_name, [])) for dep in dependencies.get(package_name, []): all_deps.update(_recurse_pixi(dep, resolved, dependencies, seen)) + resolved[package_name] = all_deps seen.remove(package_name) return all_deps @@ -214,21 +223,22 @@ def _pixi_lock_subpackage( requirements = parse_requirements(file) locked_entries: dict[Platform, list[dict]] = defaultdict(list) locked_packages: list[dict] = [] - locked_keys: set[tuple[str, Platform, str]] = set() - missing_keys: set[tuple[str, Platform, str]] = set() + locked_keys: set[tuple[CondaPip, Platform, str]] = set() + missing_keys: set[tuple[CondaPip, Platform, str]] = set() - def add_package_with_dependencies(platform: Platform, name: str): - key = ("conda", platform, name) + def add_package_with_dependencies(platform: Platform, name: str) -> None: + key: tuple[CondaPip, Platform, str] = ("conda", platform, name) if key in locked_keys: return if key not in lock_spec.packages: missing_keys.add(key) return - pkg_info = lock_spec.packages[key] - # Add to locked_entries - 
locked_entries[platform].append({pkg_info["manager"]: pkg_info["url"]}) - # Add to locked_packages - locked_packages.append(pkg_info["package_metadata"]) + pkg_infos = lock_spec.packages[key] + for pkg_info in pkg_infos: + # Add to locked_entries + locked_entries[platform].append({pkg_info["kind"]: pkg_info["url"]}) + # Add to locked_packages + locked_packages.append(pkg_info) locked_keys.add(key) # Recursively add dependencies dependencies = lock_spec.dependencies.get(key, set()) @@ -257,9 +267,8 @@ def add_package_with_dependencies(platform: Platform, name: str): "version": 5, "environments": { "default": { - "channels": lock_spec.packages[next(iter(lock_spec.packages))][ - "channels" - ], + "channels": lock_spec.channels, + "indexes": lock_spec.indexes, "packages": dict(locked_entries), }, }, @@ -302,7 +311,7 @@ def _check_consistent_lock_files( for packages_list in env_data.get("packages", {}).values(): for pkg_entry in packages_list: # pkg_entry is a dict like {'conda': 'url'} - for manager, url in pkg_entry.items(): + for url in pkg_entry.values(): global_packages.add(url) mismatches = [] @@ -315,7 +324,7 @@ def _check_consistent_lock_files( for env_data in environments.values(): for packages_list in env_data.get("packages", {}).values(): for pkg_entry in packages_list: - for manager, url in pkg_entry.items(): + for url in pkg_entry.values(): sub_packages.add(url) if not sub_packages.issubset(global_packages): @@ -335,7 +344,6 @@ def pixi_lock_command( platforms: list[Platform], verbose: bool, only_global: bool, - check_input_hash: bool, ignore_pins: list[str], skip_dependencies: list[str], overwrite_pins: list[str], @@ -354,7 +362,6 @@ def pixi_lock_command( files=files, platforms=platforms, verbose=verbose, - check_input_hash=check_input_hash, ignore_pins=ignore_pins, overwrite_pins=overwrite_pins, skip_dependencies=skip_dependencies, @@ -366,7 +373,7 @@ def pixi_lock_command( with YAML(typ="safe") as yaml, pixi_lock_output.open() as fp: global_lock_data = yaml.load(fp) - lock_spec = _parse_pixi_lock_packages(global_lock_data) + lock_specs = _parse_pixi_lock_packages(global_lock_data)["default"] sub_lock_files = [] found_files = find_requirements_files(directory, depth) for file in found_files: @@ -374,7 +381,7 @@ def pixi_lock_command( continue sublock_file = _pixi_lock_subpackage( file=file, - lock_spec=lock_spec, + lock_spec=lock_specs, platforms=platforms, yaml=yaml, ) From 0b6373c01ea8dc326255bce1e0d8b1ecfde2ee0f Mon Sep 17 00:00:00 2001 From: Bas Nijholt Date: Fri, 4 Oct 2024 23:23:39 -0700 Subject: [PATCH 20/39] . 
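Thread the package manager (`which: CondaPip`) through
`add_package_with_dependencies` instead of hard-coding `"conda"`, so pip
and conda requirements are resolved in separate namespaces and the
recursion stays within the manager of the spec that triggered it.

A rough standalone sketch of the keyed walk this enables (stand-in
names, not the unidep implementation; the real function also records
lock entries and missing keys):

```python
Key = tuple[str, str, str]  # (manager, platform, name)


def transitive(key: Key, deps: dict[Key, set[str]], seen: set[Key]) -> set[str]:
    """Collect transitive dependency names for one (manager, platform) pair."""
    if key in seen:  # guard against dependency cycles
        return set()
    seen.add(key)
    manager, platform, _ = key
    names = set(deps.get(key, ()))
    for name in list(names):
        # Stay within the same manager/platform while recursing.
        names |= transitive((manager, platform, name), deps, seen)
    return names
```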
--- unidep/_pixi_lock.py | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/unidep/_pixi_lock.py b/unidep/_pixi_lock.py index b7f3649b..207ecabe 100644 --- a/unidep/_pixi_lock.py +++ b/unidep/_pixi_lock.py @@ -226,8 +226,12 @@ def _pixi_lock_subpackage( locked_keys: set[tuple[CondaPip, Platform, str]] = set() missing_keys: set[tuple[CondaPip, Platform, str]] = set() - def add_package_with_dependencies(platform: Platform, name: str) -> None: - key: tuple[CondaPip, Platform, str] = ("conda", platform, name) + def add_package_with_dependencies( + which: CondaPip, + platform: Platform, + name: str, + ) -> None: + key: tuple[CondaPip, Platform, str] = (which, platform, name) if key in locked_keys: return if key not in lock_spec.packages: @@ -243,7 +247,7 @@ def add_package_with_dependencies(platform: Platform, name: str) -> None: # Recursively add dependencies dependencies = lock_spec.dependencies.get(key, set()) for dep_name in dependencies: - add_package_with_dependencies(platform, dep_name) + add_package_with_dependencies(which, platform, dep_name) for name, specs in requirements.requirements.items(): if name.startswith("__"): @@ -256,7 +260,7 @@ def add_package_with_dependencies(platform: Platform, name: str) -> None: _platforms = [p for p in _platforms if p in platforms] for _platform in _platforms: - add_package_with_dependencies(_platform, name) + add_package_with_dependencies(spec.which, _platform, name) if missing_keys: print(f"โš ๏ธ Missing packages: {missing_keys}") From b6c5b0d1940bfa3a08a876cff61090298f43c63c Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" Date: Sat, 5 Oct 2024 06:24:52 +0000 Subject: [PATCH 21/39] Update files from markdown-code-runner --- example/environment.yaml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/example/environment.yaml b/example/environment.yaml index 185d8b90..8a9f97ca 100644 --- a/example/environment.yaml +++ b/example/environment.yaml @@ -18,7 +18,8 @@ dependencies: - pytest - pytest-cov - pip: - - unidep + - unidep; sys_platform == 'linux' and platform_machine == 'x86_64' + - unidep; sys_platform == 'darwin' - markdown-code-runner - numthreads - yaml2bib; sys_platform == 'linux' and platform_machine == 'x86_64' From 42d4d1e6dfd414292425bb6889755629b4881f9e Mon Sep 17 00:00:00 2001 From: Bas Nijholt Date: Fri, 4 Oct 2024 23:26:27 -0700 Subject: [PATCH 22/39] wip --- unidep/_pixi_lock.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/unidep/_pixi_lock.py b/unidep/_pixi_lock.py index 207ecabe..d3972520 100644 --- a/unidep/_pixi_lock.py +++ b/unidep/_pixi_lock.py @@ -170,6 +170,8 @@ def _parse_pixi_lock_packages( defaultdict(list) ) for p in pixi_lock_data.get("packages", []): + # TODO: subdir is missing for pypi! This will cause issues + # later in the code. key = (p["kind"], p.get("subdir"), p["name"]) # Could be multiple entries for the same package, # e.g., different wheels for different OS versions From 2b33126057b01fa69d2c857ae8edc4d05da15983 Mon Sep 17 00:00:00 2001 From: Bas Nijholt Date: Sat, 5 Oct 2024 16:51:22 -0700 Subject: [PATCH 23/39] . 
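Add a standalone experiment, `pixi_create_sub_lock_file.py`, that builds
a subset lock file with `py-rattler`: the global `pixi.lock` is converted
into per-platform repodata, and only the requested packages are re-solved
against it, pinned to the already-locked records, with virtual packages
(`__osx` and friends) synthesized from the `__`-prefixed `depends`
entries. (Note that `all_virtual_packages` is annotated as returning
`dict[Platform, set[str]]` while the sets actually hold
`GenericVirtualPackage` records.)

The pin-lowering helper introduced below is a pure function, so its
behaviour can be pinned down with a few quick checks (hypothetical
usage, not part of the script):

```python
assert _version_requirement_to_lowest_version(None) is None
assert _version_requirement_to_lowest_version(">=10.13") == "10.13"
assert _version_requirement_to_lowest_version(">=1.2,<2") == "1.2"
assert _version_requirement_to_lowest_version("==3.1") == "3.1"
```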
--- pixi_create_sub_lock_file.py | 161 +++++++++++++++++++++++++++++++++++ 1 file changed, 161 insertions(+) create mode 100644 pixi_create_sub_lock_file.py diff --git a/pixi_create_sub_lock_file.py b/pixi_create_sub_lock_file.py new file mode 100644 index 00000000..c224b86b --- /dev/null +++ b/pixi_create_sub_lock_file.py @@ -0,0 +1,161 @@ +"""Create a subset of a lock file with a subset of packages.""" + +from __future__ import annotations + +import asyncio +import json +import os +import tempfile +from collections import defaultdict + +from rattler import ( + Environment, + GenericVirtualPackage, + LockFile, + Platform, + Version, + solve_with_sparse_repodata, +) +from rattler.channel import Channel, ChannelConfig +from rattler.match_spec import MatchSpec +from rattler.repo_data import SparseRepoData + + +def create_repodata_from_pixi_lock(lock_file_path: str) -> dict[str, dict]: + """Create repodata from a pixi lock file.""" + lock_file = LockFile.from_path(lock_file_path) + env = lock_file.default_environment() + repodata = {} + for platform in env.platforms(): + subdir = str(platform) + repodata[subdir] = { + "info": { + "subdir": subdir, + "base_url": f"https://conda.anaconda.org/conda-forge/{subdir}", + }, + "packages": {}, + "repodata_version": 2, + } + conda_packages = env.conda_repodata_records_for_platform(platform) + if not conda_packages: + return repodata + for package in conda_packages: + filename = ( + f"{package.name.normalized}-{package.version}-{package.build}.conda" + ) + repodata[subdir]["packages"][filename] = { # type: ignore[index] + "build": package.build, + "build_number": package.build_number, + "depends": package.depends, + "constrains": package.constrains, + "license": package.license, + "license_family": package.license_family, + "md5": package.md5.hex() if package.md5 else None, + "name": package.name.normalized, + "sha256": package.sha256.hex() if package.sha256 else None, + "size": package.size, + "subdir": package.subdir, + "timestamp": int(package.timestamp.timestamp() * 1000) + if package.timestamp + else None, + "version": str(package.version), + } + return repodata + + +def _version_requirement_to_lowest_version(version: str | None) -> str | None: + if version is None: + return None + if version.startswith(">="): + version = version[2:] + if version.startswith("=="): + version = version[2:] + version = version.split(",")[0] + return version # noqa: RET504 + + +def all_virtual_packages(env: Environment) -> dict[Platform, set[str]]: + """Get all virtual packages from an environment.""" + virtual_packages = defaultdict(set) + for platform, packages in env.packages_by_platform().items(): + for package in packages: + if not package.is_conda: + continue + repo_record = package.as_conda() + for dep in repo_record.depends: + spec = MatchSpec(dep) + if not spec.name.normalized.startswith("__"): + continue + version = _version_requirement_to_lowest_version(spec.version) + if version is None: + continue + virtual_package = GenericVirtualPackage( + spec.name, + version=Version(version), + build_string=spec.build or "*", + ) + virtual_packages[platform].add(virtual_package) + return virtual_packages + + +async def create_subset_lock_file( + original_lock_file_path: str, + required_packages: list[str], + platform: Platform, +) -> LockFile: + """Create a new lock file with a subset of packages from original lock file.""" + original_lock_file = LockFile.from_path(original_lock_file_path) + env = original_lock_file.default_environment() + conda_records = 
env.conda_repodata_records_for_platform(platform) + if conda_records is None: + msg = f"No conda records found for platform {platform}" + raise ValueError(msg) + repodata = create_repodata_from_pixi_lock(original_lock_file_path) + platform_repodata = repodata.get(str(platform)) + if platform_repodata is None: + msg = f"No repodata found for platform {platform}" + raise ValueError(msg) + + with tempfile.NamedTemporaryFile( + mode="w", + delete=False, + suffix=".json", + ) as temp_file: + json.dump(platform_repodata, temp_file) + temp_file_path = temp_file.name + print(f"Temporary repodata file: {temp_file_path}") + dummy_channel = Channel("dummy", ChannelConfig()) + sparse_repo_data = SparseRepoData(dummy_channel, str(platform), temp_file_path) + specs = [MatchSpec(f"{pkg}") for pkg in required_packages] + print(f"Specs: {specs}") + virtual_packages = all_virtual_packages(env)[platform] + print(f"Detected virtual packages: {virtual_packages}") + solved_records = await solve_with_sparse_repodata( + specs=specs, + sparse_repodata=[sparse_repo_data], + locked_packages=conda_records, + virtual_packages=virtual_packages, + ) + new_env = Environment("new_env", {platform: solved_records}) + new_lock_file = LockFile({"new_env": new_env}) + os.unlink(temp_file_path) # noqa: PTH108 + return new_lock_file + + +# Usage +async def main() -> None: + """Example usage of create_subset_lock_file.""" + original_lock_file_path = "pixi.lock" + required_packages = ["pandas", "scipy"] + platform = Platform("osx-arm64") + new_lock_file = await create_subset_lock_file( + original_lock_file_path, + required_packages, + platform, + ) + new_lock_file.to_path("new_lock_file.lock") + + +# Run the async function +if __name__ == "__main__": + asyncio.run(main()) From 277830397aaef0683b378f1600db3da2de901c97 Mon Sep 17 00:00:00 2001 From: Bas Nijholt Date: Sat, 5 Oct 2024 17:03:26 -0700 Subject: [PATCH 24/39] v0 --- pixi_create_sub_lock_file.py | 9 +++------ 1 file changed, 3 insertions(+), 6 deletions(-) diff --git a/pixi_create_sub_lock_file.py b/pixi_create_sub_lock_file.py index c224b86b..fd18efb8 100644 --- a/pixi_create_sub_lock_file.py +++ b/pixi_create_sub_lock_file.py @@ -87,11 +87,9 @@ def all_virtual_packages(env: Environment) -> dict[Platform, set[str]]: if not spec.name.normalized.startswith("__"): continue version = _version_requirement_to_lowest_version(spec.version) - if version is None: - continue virtual_package = GenericVirtualPackage( spec.name, - version=Version(version), + version=Version(version or "0"), build_string=spec.build or "*", ) virtual_packages[platform].add(virtual_package) @@ -142,12 +140,11 @@ async def create_subset_lock_file( return new_lock_file -# Usage async def main() -> None: """Example usage of create_subset_lock_file.""" original_lock_file_path = "pixi.lock" - required_packages = ["pandas", "scipy"] - platform = Platform("osx-arm64") + required_packages = ["tornado", "scipy", "ipykernel", "adaptive", "unidep"] + platform = Platform("linux-64") new_lock_file = await create_subset_lock_file( original_lock_file_path, required_packages, From 3e82adaeaea87a9da4285742f737ec466ad72dfa Mon Sep 17 00:00:00 2001 From: Bas Nijholt Date: Sat, 5 Oct 2024 17:10:18 -0700 Subject: [PATCH 25/39] . 
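Clean up the subset-solver script: drop the leftover debug prints, trim
`unidep` from the example package list, and remove a redundant comment.

For context on the virtual-package detection that stays behind, a small
sketch using the same `rattler` API this script already relies on (the
commented outputs are assumptions about the exact formatting):

```python
from rattler.match_spec import MatchSpec

# Virtual packages only surface inside `depends` strings, hence the
# name-prefix check in `all_virtual_packages`:
spec = MatchSpec("__osx >=10.13")
print(spec.name.normalized)  # "__osx"
print(spec.version)  # the raw requirement, e.g. ">=10.13"
print(spec.name.normalized.startswith("__"))  # True -> treated as virtual
```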
--- pixi_create_sub_lock_file.py | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/pixi_create_sub_lock_file.py b/pixi_create_sub_lock_file.py index fd18efb8..732f6a64 100644 --- a/pixi_create_sub_lock_file.py +++ b/pixi_create_sub_lock_file.py @@ -125,9 +125,7 @@ async def create_subset_lock_file( dummy_channel = Channel("dummy", ChannelConfig()) sparse_repo_data = SparseRepoData(dummy_channel, str(platform), temp_file_path) specs = [MatchSpec(f"{pkg}") for pkg in required_packages] - print(f"Specs: {specs}") virtual_packages = all_virtual_packages(env)[platform] - print(f"Detected virtual packages: {virtual_packages}") solved_records = await solve_with_sparse_repodata( specs=specs, sparse_repodata=[sparse_repo_data], @@ -143,7 +141,7 @@ async def create_subset_lock_file( async def main() -> None: """Example usage of create_subset_lock_file.""" original_lock_file_path = "pixi.lock" - required_packages = ["tornado", "scipy", "ipykernel", "adaptive", "unidep"] + required_packages = ["tornado", "scipy", "ipykernel", "adaptive"] platform = Platform("linux-64") new_lock_file = await create_subset_lock_file( original_lock_file_path, @@ -153,6 +151,5 @@ async def main() -> None: new_lock_file.to_path("new_lock_file.lock") -# Run the async function if __name__ == "__main__": asyncio.run(main()) From c193fb7f7dbd11037b45224cc34211ee779234a3 Mon Sep 17 00:00:00 2001 From: Bas Nijholt Date: Sat, 5 Oct 2024 17:17:02 -0700 Subject: [PATCH 26/39] simplify --- pixi_create_sub_lock_file.py | 69 ++++++++++++++++++------------------ 1 file changed, 34 insertions(+), 35 deletions(-) diff --git a/pixi_create_sub_lock_file.py b/pixi_create_sub_lock_file.py index 732f6a64..b4d0a4be 100644 --- a/pixi_create_sub_lock_file.py +++ b/pixi_create_sub_lock_file.py @@ -28,38 +28,37 @@ def create_repodata_from_pixi_lock(lock_file_path: str) -> dict[str, dict]: repodata = {} for platform in env.platforms(): subdir = str(platform) + packages = env.conda_repodata_records_for_platform(platform) + if not packages: + continue + repodata[subdir] = { "info": { "subdir": subdir, "base_url": f"https://conda.anaconda.org/conda-forge/{subdir}", }, - "packages": {}, + "packages": { + f"{pkg.name.normalized}-{pkg.version}-{pkg.build}.conda": { + "build": pkg.build, + "build_number": pkg.build_number, + "depends": pkg.depends, + "constrains": pkg.constrains, + "license": pkg.license, + "license_family": pkg.license_family, + "md5": pkg.md5.hex() if pkg.md5 else None, + "name": pkg.name.normalized, + "sha256": pkg.sha256.hex() if pkg.sha256 else None, + "size": pkg.size, + "subdir": pkg.subdir, + "timestamp": int(pkg.timestamp.timestamp() * 1000) + if pkg.timestamp + else None, + "version": str(pkg.version), + } + for pkg in packages + }, "repodata_version": 2, } - conda_packages = env.conda_repodata_records_for_platform(platform) - if not conda_packages: - return repodata - for package in conda_packages: - filename = ( - f"{package.name.normalized}-{package.version}-{package.build}.conda" - ) - repodata[subdir]["packages"][filename] = { # type: ignore[index] - "build": package.build, - "build_number": package.build_number, - "depends": package.depends, - "constrains": package.constrains, - "license": package.license, - "license_family": package.license_family, - "md5": package.md5.hex() if package.md5 else None, - "name": package.name.normalized, - "sha256": package.sha256.hex() if package.sha256 else None, - "size": package.size, - "subdir": package.subdir, - "timestamp": int(package.timestamp.timestamp() * 1000) 
- if package.timestamp - else None, - "version": str(package.version), - } return repodata @@ -84,15 +83,14 @@ def all_virtual_packages(env: Environment) -> dict[Platform, set[str]]: repo_record = package.as_conda() for dep in repo_record.depends: spec = MatchSpec(dep) - if not spec.name.normalized.startswith("__"): - continue - version = _version_requirement_to_lowest_version(spec.version) - virtual_package = GenericVirtualPackage( - spec.name, - version=Version(version or "0"), - build_string=spec.build or "*", - ) - virtual_packages[platform].add(virtual_package) + if spec.name.normalized.startswith("__"): + version = _version_requirement_to_lowest_version(spec.version) + virtual_package = GenericVirtualPackage( + spec.name, + version=Version(version or "0"), + build_string=spec.build or "*", + ) + virtual_packages[platform].add(virtual_package) return virtual_packages @@ -124,8 +122,9 @@ async def create_subset_lock_file( print(f"Temporary repodata file: {temp_file_path}") dummy_channel = Channel("dummy", ChannelConfig()) sparse_repo_data = SparseRepoData(dummy_channel, str(platform), temp_file_path) - specs = [MatchSpec(f"{pkg}") for pkg in required_packages] + specs = [MatchSpec(pkg) for pkg in required_packages] virtual_packages = all_virtual_packages(env)[platform] + solved_records = await solve_with_sparse_repodata( specs=specs, sparse_repodata=[sparse_repo_data], From f801f80c42d212eda3e6a1e2ed46b9318d4c6a60 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" Date: Sat, 8 Feb 2025 21:05:44 +0000 Subject: [PATCH 27/39] Update files from markdown-code-runner --- README.md | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/README.md b/README.md index 5e370a10..6cb6602d 100644 --- a/README.md +++ b/README.md @@ -420,7 +420,7 @@ See [example](example/) for more information or check the output of `unidep -h` ```bash usage: unidep [-h] - {merge,install,install-all,conda-lock,pixi-lock,pip-compile,pip,conda,version} + {merge,install,install-all,conda-lock,pixi-lock,pip-compile,pip,conda,version} ... Unified Conda and Pip requirements management. @@ -488,8 +488,8 @@ See `unidep merge -h` for more information: ```bash usage: unidep merge [-h] [-o OUTPUT] [-n NAME] [--stdout] - [--selector {sel,comment}] [-d DIRECTORY] [--depth DEPTH] - [-v] + [--selector {sel,comment}] [--pixi] [-d DIRECTORY] + [--depth DEPTH] [-v] [-p {linux-64,linux-aarch64,linux-ppc64le,osx-64,osx-arm64,win-64}] [--skip-dependency SKIP_DEPENDENCY] [--ignore-pin IGNORE_PIN] [--overwrite-pin OVERWRITE_PIN] @@ -505,7 +505,7 @@ create `environment.yaml`. 
These are the defaults, so you can also just run options: -h, --help show this help message and exit -o, --output OUTPUT Output file for the conda environment, by default - `environment.yaml` + `environment.yaml` or `pixi.toml` if `--pixi` is used -n, --name NAME Name of the conda environment, by default `myenv` --stdout Output to stdout instead of a file --selector {sel,comment} @@ -513,6 +513,8 @@ options: `sel` then `- numpy # [linux]` becomes `sel(linux): numpy`, if `comment` then it remains `- numpy # [linux]`, by default `sel` + --pixi Generate a `pixi.toml` file instead of + `environment.yaml` -d, --directory DIRECTORY Base directory to scan for `requirements.yaml` or `pyproject.toml` file(s), by default `.` From ce745cc2f73649ed6184e463f0584c597e4aa22c Mon Sep 17 00:00:00 2001 From: Bas Nijholt Date: Sat, 8 Feb 2025 13:16:42 -0800 Subject: [PATCH 28/39] relock --- tests/simple_monorepo/pixi.lock | 78 ++++++++++++--------------------- 1 file changed, 29 insertions(+), 49 deletions(-) diff --git a/tests/simple_monorepo/pixi.lock b/tests/simple_monorepo/pixi.lock index 8d532893..7001ae43 100644 --- a/tests/simple_monorepo/pixi.lock +++ b/tests/simple_monorepo/pixi.lock @@ -1,13 +1,13 @@ -# This file is created and managed by `unidep` 0.63.2. +# This file is created and managed by `unidep` 0.67.3. # For details see https://github.com/basnijholt/unidep -# File generated with: `unidep pixi-lock -d tests/simple_monorepo -p osx-64 -p osx-arm64` +# File generated with: `unidep pixi-lock` # # This environment can be installed with # `pixi install` # This file is a `pixi.lock` file generated via `unidep`. # For details see https://pixi.sh/ -version: 5 +version: 6 environments: default: channels: @@ -19,77 +19,57 @@ environments: osx-arm64: - conda: https://conda.anaconda.org/conda-forge/osx-arm64/bzip2-1.0.8-h99b78c6_7.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/python_abi-3.13-5_cp313t.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/tzdata-2024b-hc8b5060_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/tzdata-2025a-h78e105d_0.conda packages: -- kind: conda - name: bzip2 - version: 1.0.8 - build: h99b78c6_7 - build_number: 7 - subdir: osx-arm64 - url: https://conda.anaconda.org/conda-forge/osx-arm64/bzip2-1.0.8-h99b78c6_7.conda - sha256: adfa71f158cbd872a36394c56c3568e6034aa55c623634b37a4836bd036e6b91 - md5: fc6948412dbbbe9a4c9ddbbcfe0a79ab - depends: - - __osx >=11.0 - license: bzip2-1.0.6 - license_family: BSD - size: 122909 - timestamp: 1720974522888 -- kind: conda - name: bzip2 - version: 1.0.8 - build: hfdf4475_7 - build_number: 7 - subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/bzip2-1.0.8-hfdf4475_7.conda +- conda: https://conda.anaconda.org/conda-forge/osx-64/bzip2-1.0.8-hfdf4475_7.conda sha256: cad153608b81fb24fc8c509357daa9ae4e49dfc535b2cb49b91e23dbd68fc3c5 md5: 7ed4301d437b59045be7e051a0308211 depends: - __osx >=10.13 + arch: x86_64 + platform: osx license: bzip2-1.0.6 license_family: BSD size: 134188 timestamp: 1720974491916 -- kind: conda - name: python_abi - version: '3.13' - build: 5_cp313t +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/bzip2-1.0.8-h99b78c6_7.conda + sha256: adfa71f158cbd872a36394c56c3568e6034aa55c623634b37a4836bd036e6b91 + md5: fc6948412dbbbe9a4c9ddbbcfe0a79ab + depends: + - __osx >=11.0 + arch: arm64 + platform: osx + license: bzip2-1.0.6 + license_family: BSD + size: 122909 + timestamp: 1720974522888 +- conda: 
https://conda.anaconda.org/conda-forge/osx-64/python_abi-3.13-5_cp313t.conda build_number: 5 - subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/python_abi-3.13-5_cp313t.conda sha256: a96553de64be6441400e88c2c6ad7123d91cbcea4898b5966a653163f30d9f55 md5: 32ba8fc57ccb0b48dd6006974f65c525 constrains: - python 3.13.* *_cp313t + arch: x86_64 + platform: osx license: BSD-3-Clause license_family: BSD size: 6300 timestamp: 1723823108577 -- kind: conda - name: python_abi - version: '3.13' - build: 5_cp313t +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/python_abi-3.13-5_cp313t.conda build_number: 5 - subdir: osx-arm64 - url: https://conda.anaconda.org/conda-forge/osx-arm64/python_abi-3.13-5_cp313t.conda sha256: 2165466ff175e1890b66d079d64449a1b6dd9873fb0f5e977839ccc4639b813b md5: 24a9a05eba65586da53ad7b56a06dc02 constrains: - python 3.13.* *_cp313t + arch: arm64 + platform: osx license: BSD-3-Clause license_family: BSD size: 6317 timestamp: 1723823118660 -- kind: conda - name: tzdata - version: 2024b - build: hc8b5060_0 - subdir: noarch - noarch: generic - url: https://conda.anaconda.org/conda-forge/noarch/tzdata-2024b-hc8b5060_0.conda - sha256: 4fde5c3008bf5d2db82f2b50204464314cc3c91c1d953652f7bd01d9e52aefdf - md5: 8ac3367aafb1cc0a068483c580af8015 +- conda: https://conda.anaconda.org/conda-forge/noarch/tzdata-2025a-h78e105d_0.conda + sha256: c4b1ae8a2931fe9b274c44af29c5475a85b37693999f8c792dad0f8c6734b1de + md5: dbcace4706afdfb7eb891f7b37d07c04 license: LicenseRef-Public-Domain - size: 122354 - timestamp: 1728047496079 + size: 122921 + timestamp: 1737119101255 From 2730de98a798768c2c4ca4ba1ad23763b45a9228 Mon Sep 17 00:00:00 2001 From: Bas Nijholt Date: Sat, 8 Feb 2025 13:29:27 -0800 Subject: [PATCH 29/39] set manifest file --- unidep/_pixi_lock.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/unidep/_pixi_lock.py b/unidep/_pixi_lock.py index d3972520..a66655fb 100644 --- a/unidep/_pixi_lock.py +++ b/unidep/_pixi_lock.py @@ -35,7 +35,7 @@ def _run_pixi_lock( print(f"๐Ÿ—‘๏ธ Removing existing `{pixi_lock_output}`") pixi_lock_output.unlink() - cmd = ["pixi", "list", *extra_flags] + cmd = ["pixi", "lock", "--manifest-path", str(pixi_toml), *extra_flags] print(f"๐Ÿ”’ Locking dependencies with `{' '.join(cmd)}`\n") try: with change_directory(pixi_toml.parent): From b1c8f751e1d7c3db1caaf9af8f05cf306f7a70bc Mon Sep 17 00:00:00 2001 From: Bas Nijholt Date: Sat, 8 Feb 2025 13:42:37 -0800 Subject: [PATCH 30/39] ENH: Track the origin of each Spec --- tests/test_project_dependency_handling.py | 57 ++++++++-- tests/test_unidep.py | 123 +++++++++++++++++++++- unidep/_conflicts.py | 2 + unidep/_dependencies_parsing.py | 13 ++- unidep/platform_definitions.py | 6 +- 5 files changed, 185 insertions(+), 16 deletions(-) diff --git a/tests/test_project_dependency_handling.py b/tests/test_project_dependency_handling.py index 574b8eab..0f65b708 100644 --- a/tests/test_project_dependency_handling.py +++ b/tests/test_project_dependency_handling.py @@ -103,30 +103,69 @@ def test_project_dependency_handling_in_pyproject_toml( expected = { "python-graphviz": [ - Spec(name="python-graphviz", which="conda", identifier="17e5d607"), + Spec( + name="python-graphviz", + which="conda", + identifier="17e5d607", + origin=(p,), + ), ], "graphviz": [ - Spec(name="graphviz", which="pip", identifier="17e5d607"), - Spec(name="graphviz", which="conda", identifier="5eb93b8c"), + Spec(name="graphviz", which="pip", identifier="17e5d607", origin=(p,)), + Spec(name="graphviz", which="conda", 
identifier="5eb93b8c", origin=(p,)), ], } if project_dependency_handling == "pip-only": expected.update( { - "requests": [Spec(name="requests", which="pip", identifier="08fd8713")], - "pandas": [Spec(name="pandas", which="pip", identifier="9e467fa1")], + "requests": [ + Spec( + name="requests", + which="pip", + identifier="08fd8713", + origin=(p,), + ), + ], + "pandas": [ + Spec( + name="pandas", + which="pip", + identifier="9e467fa1", + origin=(p,), + ), + ], }, ) elif project_dependency_handling == "same-name": expected.update( { "requests": [ - Spec(name="requests", which="conda", identifier="08fd8713"), - Spec(name="requests", which="pip", identifier="08fd8713"), + Spec( + name="requests", + which="conda", + identifier="08fd8713", + origin=(p,), + ), + Spec( + name="requests", + which="pip", + identifier="08fd8713", + origin=(p,), + ), ], "pandas": [ - Spec(name="pandas", which="conda", identifier="9e467fa1"), - Spec(name="pandas", which="pip", identifier="9e467fa1"), + Spec( + name="pandas", + which="conda", + identifier="9e467fa1", + origin=(p,), + ), + Spec( + name="pandas", + which="pip", + identifier="9e467fa1", + origin=(p,), + ), ], }, ) diff --git a/tests/test_unidep.py b/tests/test_unidep.py index 9a0b34b8..4f2d4300 100644 --- a/tests/test_unidep.py +++ b/tests/test_unidep.py @@ -130,6 +130,7 @@ def test_parse_requirements( selector="linux64", pin=">1", identifier="c292b98a", + origin=(p,), ), Spec( name="foo", @@ -137,18 +138,21 @@ def test_parse_requirements( selector="linux64", pin=">1", identifier="c292b98a", + origin=(p,), ), Spec( name="foo", which="conda", selector="unix", identifier="530d9eaa", + origin=(p,), ), Spec( name="foo", which="pip", selector="unix", identifier="530d9eaa", + origin=(p,), ), ], "bar": [ @@ -157,22 +161,26 @@ def test_parse_requirements( which="conda", pin=">1", identifier="08fd8713", + origin=(p,), ), Spec( name="bar", which="pip", pin=">1", identifier="08fd8713", + origin=(p,), ), Spec( name="bar", which="conda", identifier="9e467fa1", + origin=(p,), ), Spec( name="bar", which="pip", identifier="9e467fa1", + origin=(p,), ), ], } @@ -421,12 +429,14 @@ def test_surrounding_comments( which="conda", selector="osx", identifier="8b0c4c31", + origin=(p,), ), Spec( name="yolo", which="pip", selector="osx", identifier="8b0c4c31", + origin=(p,), ), ], "foo": [ @@ -435,12 +445,14 @@ def test_surrounding_comments( which="conda", selector="linux", identifier="ecd4baa6", + origin=(p,), ), Spec( name="foo", which="pip", selector="linux", identifier="ecd4baa6", + origin=(p,), ), ], "bar": [ @@ -449,12 +461,14 @@ def test_surrounding_comments( which="conda", selector="win", identifier="8528de75", + origin=(p,), ), Spec( name="bar", which="pip", selector="win", identifier="8528de75", + origin=(p,), ), ], "baz": [ @@ -462,14 +476,21 @@ def test_surrounding_comments( name="baz", which="conda", identifier="9e467fa1", + origin=(p,), + ), + Spec( + name="baz", + which="pip", + identifier="9e467fa1", + origin=(p,), ), - Spec(name="baz", which="pip", identifier="9e467fa1"), ], "pip-package": [ Spec( name="pip-package", which="pip", identifier="5813b64a", + origin=(p,), ), ], "pip-package2": [ @@ -478,6 +499,7 @@ def test_surrounding_comments( which="pip", selector="osx", identifier="1c0fa4c4", + origin=(p,), ), ], } @@ -513,6 +535,7 @@ def test_filter_pip_and_conda( which="conda", selector="linux64", identifier="c292b98a", + origin=(p,), ), ], "package2": [ @@ -521,6 +544,7 @@ def test_filter_pip_and_conda( which="conda", selector="osx64", identifier="b2ac468f", + 
origin=(p,), ), ], "package3": [ @@ -528,6 +552,7 @@ def test_filter_pip_and_conda( name="package3", which="pip", identifier="08fd8713", + origin=(p,), ), ], "package4": [ @@ -536,6 +561,7 @@ def test_filter_pip_and_conda( which="pip", selector="unix", identifier="1d5d7757", + origin=(p,), ), ], "common_package": [ @@ -544,12 +570,14 @@ def test_filter_pip_and_conda( which="conda", selector="unix", identifier="f78244dc", + origin=(p,), ), Spec( name="common_package", which="pip", selector="unix", identifier="f78244dc", + origin=(p,), ), ], "shared_package": [ @@ -558,12 +586,14 @@ def test_filter_pip_and_conda( which="conda", selector="linux64", identifier="1599d575", + origin=(p,), ), Spec( name="shared_package", which="pip", selector="win64", identifier="46630b59", + origin=(p,), ), ], } @@ -580,6 +610,7 @@ def test_filter_pip_and_conda( which="conda", selector="linux64", identifier="c292b98a", + origin=(p,), ), }, }, @@ -590,6 +621,7 @@ def test_filter_pip_and_conda( which="conda", selector="osx64", identifier="b2ac468f", + origin=(p,), ), }, }, @@ -599,6 +631,7 @@ def test_filter_pip_and_conda( name="package3", which="pip", identifier="08fd8713", + origin=(p,), ), }, }, @@ -609,6 +642,7 @@ def test_filter_pip_and_conda( which="pip", selector="unix", identifier="1d5d7757", + origin=(p,), ), }, "linux-aarch64": { @@ -617,6 +651,7 @@ def test_filter_pip_and_conda( which="pip", selector="unix", identifier="1d5d7757", + origin=(p,), ), }, "linux-ppc64le": { @@ -625,6 +660,7 @@ def test_filter_pip_and_conda( which="pip", selector="unix", identifier="1d5d7757", + origin=(p,), ), }, "osx-64": { @@ -633,6 +669,7 @@ def test_filter_pip_and_conda( which="pip", selector="unix", identifier="1d5d7757", + origin=(p,), ), }, "osx-arm64": { @@ -641,6 +678,7 @@ def test_filter_pip_and_conda( which="pip", selector="unix", identifier="1d5d7757", + origin=(p,), ), }, }, @@ -651,12 +689,14 @@ def test_filter_pip_and_conda( which="conda", selector="unix", identifier="f78244dc", + origin=(p,), ), "pip": Spec( name="common_package", which="pip", selector="unix", identifier="f78244dc", + origin=(p,), ), }, "linux-aarch64": { @@ -665,12 +705,14 @@ def test_filter_pip_and_conda( which="conda", selector="unix", identifier="f78244dc", + origin=(p,), ), "pip": Spec( name="common_package", which="pip", selector="unix", identifier="f78244dc", + origin=(p,), ), }, "linux-ppc64le": { @@ -679,12 +721,14 @@ def test_filter_pip_and_conda( which="conda", selector="unix", identifier="f78244dc", + origin=(p,), ), "pip": Spec( name="common_package", which="pip", selector="unix", identifier="f78244dc", + origin=(p,), ), }, "osx-64": { @@ -693,12 +737,14 @@ def test_filter_pip_and_conda( which="conda", selector="unix", identifier="f78244dc", + origin=(p,), ), "pip": Spec( name="common_package", which="pip", selector="unix", identifier="f78244dc", + origin=(p,), ), }, "osx-arm64": { @@ -707,12 +753,14 @@ def test_filter_pip_and_conda( which="conda", selector="unix", identifier="f78244dc", + origin=(p,), ), "pip": Spec( name="common_package", which="pip", selector="unix", identifier="f78244dc", + origin=(p,), ), }, }, @@ -723,6 +771,7 @@ def test_filter_pip_and_conda( which="conda", selector="linux64", identifier="1599d575", + origin=(p,), ), }, "win-64": { @@ -731,6 +780,7 @@ def test_filter_pip_and_conda( which="pip", selector="win64", identifier="46630b59", + origin=(p,), ), }, }, @@ -796,6 +846,7 @@ def test_duplicates_with_version( selector="linux64", pin=">1", identifier="c292b98a", + origin=(p,), ), Spec( name="foo", @@ 
-803,18 +854,21 @@ def test_duplicates_with_version( selector="linux64", pin=">1", identifier="c292b98a", + origin=(p,), ), Spec( name="foo", which="conda", selector="linux64", identifier="dd6a8aaf", + origin=(p,), ), Spec( name="foo", which="pip", selector="linux64", identifier="dd6a8aaf", + origin=(p,), ), ], "bar": [ @@ -822,11 +876,13 @@ def test_duplicates_with_version( name="bar", which="conda", identifier="08fd8713", + origin=(p,), ), Spec( name="bar", which="pip", identifier="08fd8713", + origin=(p,), ), ], } @@ -840,6 +896,7 @@ def test_duplicates_with_version( selector="linux64", pin=">1", identifier="c292b98a", + origin=(p,), ), "pip": Spec( name="foo", @@ -847,6 +904,7 @@ def test_duplicates_with_version( selector="linux64", pin=">1", identifier="c292b98a", + origin=(p,), ), }, }, @@ -856,11 +914,13 @@ def test_duplicates_with_version( name="bar", which="conda", identifier="08fd8713", + origin=(p,), ), "pip": Spec( name="bar", which="pip", identifier="08fd8713", + origin=(p,), ), }, }, @@ -905,6 +965,7 @@ def test_duplicates_different_platforms( selector="linux64", pin=">1", identifier="c292b98a", + origin=(p,), ), Spec( name="foo", @@ -912,6 +973,7 @@ def test_duplicates_different_platforms( selector="linux64", pin=">1", identifier="c292b98a", + origin=(p,), ), Spec( name="foo", @@ -919,6 +981,7 @@ def test_duplicates_different_platforms( selector="linux", pin="<=2", identifier="ecd4baa6", + origin=(p,), ), Spec( name="foo", @@ -926,6 +989,7 @@ def test_duplicates_different_platforms( selector="linux", pin="<=2", identifier="ecd4baa6", + origin=(p,), ), ], } @@ -938,12 +1002,14 @@ def test_duplicates_different_platforms( which="conda", pin=">1,<=2", identifier="c292b98a", + origin=(p,), ), "pip": Spec( name="foo", which="pip", pin=">1,<=2", identifier="c292b98a", + origin=(p,), ), }, "linux-aarch64": { @@ -953,6 +1019,7 @@ def test_duplicates_different_platforms( selector="linux", pin="<=2", identifier="ecd4baa6", + origin=(p,), ), "pip": Spec( name="foo", @@ -960,6 +1027,7 @@ def test_duplicates_different_platforms( selector="linux", pin="<=2", identifier="ecd4baa6", + origin=(p,), ), }, "linux-ppc64le": { @@ -969,6 +1037,7 @@ def test_duplicates_different_platforms( selector="linux", pin="<=2", identifier="ecd4baa6", + origin=(p,), ), "pip": Spec( name="foo", @@ -976,6 +1045,7 @@ def test_duplicates_different_platforms( selector="linux", pin="<=2", identifier="ecd4baa6", + origin=(p,), ), }, }, @@ -1032,6 +1102,7 @@ def test_expand_none_with_different_platforms( selector="linux64", pin=">1", identifier="c292b98a", + origin=(p,), ), Spec( name="foo", @@ -1039,18 +1110,21 @@ def test_expand_none_with_different_platforms( selector="linux64", pin=">1", identifier="c292b98a", + origin=(p,), ), Spec( name="foo", which="conda", pin="<3", identifier="5eb93b8c", + origin=(p,), ), Spec( name="foo", which="pip", pin="<3", identifier="5eb93b8c", + origin=(p,), ), ], } @@ -1063,12 +1137,14 @@ def test_expand_none_with_different_platforms( which="conda", pin=">1,<3", identifier="c292b98a", + origin=(p,), ), "pip": Spec( name="foo", which="pip", pin=">1,<3", identifier="c292b98a", + origin=(p,), ), }, "linux-aarch64": { @@ -1077,12 +1153,14 @@ def test_expand_none_with_different_platforms( which="conda", pin="<3", identifier="5eb93b8c", + origin=(p,), ), "pip": Spec( name="foo", which="pip", pin="<3", identifier="5eb93b8c", + origin=(p,), ), }, "linux-ppc64le": { @@ -1091,12 +1169,14 @@ def test_expand_none_with_different_platforms( which="conda", pin="<3", identifier="5eb93b8c", + 
origin=(p,), ), "pip": Spec( name="foo", which="pip", pin="<3", identifier="5eb93b8c", + origin=(p,), ), }, "osx-64": { @@ -1105,12 +1185,14 @@ def test_expand_none_with_different_platforms( which="conda", pin="<3", identifier="5eb93b8c", + origin=(p,), ), "pip": Spec( name="foo", which="pip", pin="<3", identifier="5eb93b8c", + origin=(p,), ), }, "osx-arm64": { @@ -1119,12 +1201,14 @@ def test_expand_none_with_different_platforms( which="conda", pin="<3", identifier="5eb93b8c", + origin=(p,), ), "pip": Spec( name="foo", which="pip", pin="<3", identifier="5eb93b8c", + origin=(p,), ), }, "win-64": { @@ -1133,12 +1217,14 @@ def test_expand_none_with_different_platforms( which="conda", pin="<3", identifier="5eb93b8c", + origin=(p,), ), "pip": Spec( name="foo", which="pip", pin="<3", identifier="5eb93b8c", + origin=(p,), ), }, }, @@ -1191,12 +1277,14 @@ def test_different_pins_on_conda_and_pip( which="conda", pin="<1", identifier="17e5d607", + origin=(p,), ), Spec( name="foo", which="pip", pin=">1", identifier="17e5d607", + origin=(p,), ), ], } @@ -1210,12 +1298,14 @@ def test_different_pins_on_conda_and_pip( which="conda", pin="<1", identifier="17e5d607", + origin=(p,), ), "pip": Spec( name="foo", which="pip", pin=">1", identifier="17e5d607", + origin=(p,), ), }, }, @@ -1620,6 +1710,7 @@ def test_pip_and_conda_different_name_on_linux64( which="conda", selector="linux64", identifier="c292b98a", + origin=(p,), ), ], "cuquantum": [ @@ -1628,6 +1719,7 @@ def test_pip_and_conda_different_name_on_linux64( which="pip", selector="linux64", identifier="c292b98a", + origin=(p,), ), ], } @@ -1641,6 +1733,7 @@ def test_pip_and_conda_different_name_on_linux64( which="conda", selector="linux64", identifier="c292b98a", + origin=(p,), ), }, }, @@ -1651,6 +1744,7 @@ def test_pip_and_conda_different_name_on_linux64( which="pip", selector="linux64", identifier="c292b98a", + origin=(p,), ), }, }, @@ -1687,11 +1781,13 @@ def test_parse_requirements_with_ignore_pin( name="foo", which="conda", identifier="17e5d607", + origin=(p,), ), Spec( name="foo", which="pip", identifier="17e5d607", + origin=(p,), ), ], } @@ -1725,11 +1821,13 @@ def test_parse_requirements_with_skip_dependency( name="baz", which="conda", identifier="08fd8713", + origin=(p,), ), Spec( name="baz", which="pip", identifier="08fd8713", + origin=(p,), ), ], } @@ -1757,6 +1855,7 @@ def test_pin_star_cuda(toml_or_yaml: Literal["toml", "yaml"], tmp_path: Path) -> selector="linux64", pin="* cuda*", identifier="c292b98a", + origin=(p,), ), Spec( name="qsimcirq", @@ -1764,6 +1863,7 @@ def test_pin_star_cuda(toml_or_yaml: Literal["toml", "yaml"], tmp_path: Path) -> selector="arm64", pin="* cpu*", identifier="489f33e0", + origin=(p,), ), ], } @@ -1797,12 +1897,14 @@ def test_parse_requirements_with_overwrite_pins( which="conda", pin="=1", identifier="17e5d607", + origin=(p,), ), Spec( name="foo", which="pip", pin="=1", identifier="17e5d607", + origin=(p,), ), ], "bar": [ @@ -1811,6 +1913,7 @@ def test_parse_requirements_with_overwrite_pins( which="conda", pin="* cpu*", identifier="5eb93b8c", + origin=(p,), ), ], } @@ -1845,12 +1948,14 @@ def test_duplicate_names_different_platforms( which="pip", selector="arm64", identifier="1b26c5b2", + origin=(p,), ), Spec( name="ray", which="pip", selector="linux64", identifier="dd6a8aaf", + origin=(p,), ), ], "ray-core": [ @@ -1859,6 +1964,7 @@ def test_duplicate_names_different_platforms( which="conda", selector="linux64", identifier="dd6a8aaf", + origin=(p,), ), ], } @@ -1991,6 +2097,7 @@ def 
test_pip_with_pinning_special_case_wildcard( which="pip", pin="* cuda*", identifier="17e5d607", + origin=(p1,), ), }, }, @@ -2047,6 +2154,7 @@ def test_pip_with_pinning_special_case_git_repo( which="pip", pin="@ git+https://github.com/python-adaptive/adaptive.git@main", identifier="17e5d607", + origin=(p1,), ), }, }, @@ -2082,12 +2190,14 @@ def test_not_equal( which="conda", pin="!=1.0.0,<2", identifier="17e5d607", + origin=(p1,), ), "pip": Spec( name="adaptive", which="pip", pin="!=1.0.0,<2", identifier="17e5d607", + origin=(p1,), ), }, }, @@ -2114,8 +2224,13 @@ def test_dot_in_package_name( requirements = parse_requirements(p1, verbose=False) assert requirements.requirements == { "ruamel.yaml": [ - Spec(name="ruamel.yaml", which="conda", identifier="17e5d607"), - Spec(name="ruamel.yaml", which="pip", identifier="17e5d607"), + Spec( + name="ruamel.yaml", + which="conda", + identifier="17e5d607", + origin=(p1,), + ), + Spec(name="ruamel.yaml", which="pip", identifier="17e5d607", origin=(p1,)), ], } @@ -2249,6 +2364,7 @@ def test_pip_dep_with_extras( pin=None, identifier="17e5d607", selector=None, + origin=(p,), ), }, }, @@ -2260,6 +2376,7 @@ def test_pip_dep_with_extras( pin=None, identifier="17e5d607", selector=None, + origin=(p,), ), }, }, diff --git a/unidep/_conflicts.py b/unidep/_conflicts.py index 3ad8c749..f9105f3a 100644 --- a/unidep/_conflicts.py +++ b/unidep/_conflicts.py @@ -88,11 +88,13 @@ def _maybe_new_spec_with_combined_pinnings( first = pinned_specs[0] pins = [m.pin for m in pinned_specs] pin = combine_version_pinnings(pins, name=first.name) # type: ignore[arg-type] + combined_files = tuple({f for spec in specs for f in (spec.origin or ())}) return Spec( name=first.name, which=first.which, pin=pin, identifier=first.identifier, # should I create a new one? 
+ origin=combined_files, ) # Flatten the list diff --git a/unidep/_dependencies_parsing.py b/unidep/_dependencies_parsing.py index 54158119..8972901e 100644 --- a/unidep/_dependencies_parsing.py +++ b/unidep/_dependencies_parsing.py @@ -112,6 +112,7 @@ def _parse_dependency( ignore_pins: list[str], overwrite_pins: dict[str, str | None], skip_dependencies: list[str], + origin: Path, ) -> list[Spec]: name, pin, selector = parse_package_str(dependency) if name in ignore_pins: @@ -130,10 +131,10 @@ def _parse_dependency( identifier_hash = _identifier(identifier, selector) if which == "both": return [ - Spec(name, "conda", pin, identifier_hash, selector), - Spec(name, "pip", pin, identifier_hash, selector), + Spec(name, "conda", pin, identifier_hash, selector, origin=(origin,)), + Spec(name, "pip", pin, identifier_hash, selector, origin=(origin,)), ] - return [Spec(name, which, pin, identifier_hash, selector)] + return [Spec(name, which, pin, identifier_hash, selector, origin=(origin,))] class ParsedRequirements(NamedTuple): @@ -271,6 +272,7 @@ def _update_data_structures( if verbose: print(f"๐Ÿ“„ Parsing `{path_with_extras.path_with_extras}`") data = _load(path_with_extras.path, yaml) + data["_origin"] = path_with_extras.path datas.append(data) _move_local_optional_dependencies_to_local_dependencies( data=data, # modified in place @@ -486,6 +488,7 @@ def parse_requirements( ignore_pins, overwrite_pins_map, skip_dependencies, + origin=data["_origin"], ) for opt_name, opt_deps in data.get("optional_dependencies", {}).items(): if opt_name in _extras or "*" in _extras: @@ -497,6 +500,7 @@ def parse_requirements( overwrite_pins_map, skip_dependencies, is_optional=True, + origin=data["_origin"], ) return ParsedRequirements( @@ -533,6 +537,7 @@ def _add_dependencies( skip_dependencies: list[str], *, is_optional: bool = False, + origin: Path, ) -> int: for i, dep in enumerate(dependencies): identifier += 1 @@ -546,6 +551,7 @@ def _add_dependencies( ignore_pins, overwrite_pins_map, skip_dependencies, + origin, ) for spec in specs: _check_allowed_local_dependency(spec.name, is_optional) @@ -563,6 +569,7 @@ def _add_dependencies( ignore_pins, overwrite_pins_map, skip_dependencies, + origin, ) for spec in specs: _check_allowed_local_dependency(spec.name, is_optional) diff --git a/unidep/platform_definitions.py b/unidep/platform_definitions.py index a4151c31..31517534 100644 --- a/unidep/platform_definitions.py +++ b/unidep/platform_definitions.py @@ -6,7 +6,10 @@ from __future__ import annotations import sys -from typing import NamedTuple, cast +from typing import TYPE_CHECKING, NamedTuple, cast + +if TYPE_CHECKING: + from pathlib import Path if sys.version_info >= (3, 8): from typing import Literal, get_args @@ -120,6 +123,7 @@ class Spec(NamedTuple): identifier: str | None = None # can be of type `Selector` but also space separated string of `Selector`s selector: str | None = None + origin: tuple[Path, ...] 
= () def platforms(self) -> list[Platform] | None: """Return the platforms for this dependency.""" From 2ef41e0a3ec1fe8a750f3c272f4d1ffbc63d9352 Mon Sep 17 00:00:00 2001 From: Bas Nijholt Date: Sat, 8 Feb 2025 14:03:05 -0800 Subject: [PATCH 31/39] Extra test --- tests/test_unidep.py | 127 +++++++++++++++++++++++++++++++++++++++++++ unidep/_conflicts.py | 16 +++++- 2 files changed, 140 insertions(+), 3 deletions(-) diff --git a/tests/test_unidep.py b/tests/test_unidep.py index 4f2d4300..71b5c52d 100644 --- a/tests/test_unidep.py +++ b/tests/test_unidep.py @@ -2581,3 +2581,130 @@ def test_optional_dependencies_with_version_specifier( ) assert resolved.keys() == {"adaptive"} assert resolved["adaptive"][None]["conda"].pin == "=0.13.2" + + +@pytest.mark.parametrize("toml_or_yaml", ["toml", "yaml"]) +def test_origin_in_spec( + tmp_path: Path, + toml_or_yaml: Literal["toml", "yaml"], +) -> None: + d1 = tmp_path / "dir1" + d1.mkdir() + f1 = d1 / "requirements.yaml" + f1.write_text("dependencies:\n - numpy\n - conda: mumps") + + d2 = tmp_path / "dir2" + d2.mkdir() + f2 = d2 / "requirements.yaml" + f2.write_text("dependencies:\n - pip: pandas\n - numpy") + f1 = maybe_as_toml(toml_or_yaml, f1) + f2 = maybe_as_toml(toml_or_yaml, f2) + + requirements = parse_requirements(f1, f2, verbose=False) + assert requirements.requirements == { + "numpy": [ + Spec( + name="numpy", + which="conda", + pin=None, + identifier="17e5d607", + selector=None, + origin=(f1,), + ), + Spec( + name="numpy", + which="pip", + pin=None, + identifier="17e5d607", + selector=None, + origin=(f1,), + ), + Spec( + name="numpy", + which="conda", + pin=None, + identifier="9e467fa1", + selector=None, + origin=(f2,), + ), + Spec( + name="numpy", + which="pip", + pin=None, + identifier="9e467fa1", + selector=None, + origin=(f2,), + ), + ], + "mumps": [ + Spec( + name="mumps", + which="conda", + pin=None, + identifier="5eb93b8c", + selector=None, + origin=(f1,), + ), + ], + "pandas": [ + Spec( + name="pandas", + which="pip", + pin=None, + identifier="08fd8713", + selector=None, + origin=(f2,), + ), + ], + } + + resolved = resolve_conflicts( + requirements.requirements, + requirements.platforms, + ) + assert resolved == { + "numpy": { + None: { + "conda": Spec( + name="numpy", + which="conda", + pin=None, + identifier="17e5d607", + selector=None, + origin=(f1, f2), + ), + "pip": Spec( + name="numpy", + which="pip", + pin=None, + identifier="17e5d607", + selector=None, + origin=(f1, f2), + ), + }, + }, + "mumps": { + None: { + "conda": Spec( + name="mumps", + which="conda", + pin=None, + identifier="5eb93b8c", + selector=None, + origin=(f1,), + ), + }, + }, + "pandas": { + None: { + "pip": Spec( + name="pandas", + which="pip", + pin=None, + identifier="08fd8713", + selector=None, + origin=(f2,), + ), + }, + }, + } diff --git a/unidep/_conflicts.py b/unidep/_conflicts.py index f9105f3a..42272d37 100644 --- a/unidep/_conflicts.py +++ b/unidep/_conflicts.py @@ -82,22 +82,32 @@ def _maybe_new_spec_with_combined_pinnings( specs: list[Spec], ) -> Spec: pinned_specs = [m for m in specs if m.pin is not None] + combined_origin = tuple(sorted({p for s in specs for p in s.origin})) if len(pinned_specs) == 1: - return pinned_specs[0] + if len(combined_origin) == 1: + return pinned_specs[0] + # If there is only one pinned spec, but the origins are different, + # we need to create a new spec with the combined origin. 
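+        # Illustrative: `combined_origin` gathers every file that declared this
+        # spec, e.g. (dir1/requirements.yaml, dir2/requirements.yaml) as in
+        # `test_origin_in_spec` above; `sorted` keeps the order deterministic.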
+ return pinned_specs[0]._replace(origin=combined_origin) + if len(pinned_specs) > 1: first = pinned_specs[0] pins = [m.pin for m in pinned_specs] pin = combine_version_pinnings(pins, name=first.name) # type: ignore[arg-type] - combined_files = tuple({f for spec in specs for f in (spec.origin or ())}) return Spec( name=first.name, which=first.which, pin=pin, identifier=first.identifier, # should I create a new one? - origin=combined_files, + origin=combined_origin, ) # Flatten the list + assert len(pinned_specs) == 0 + if len(combined_origin) > 1: + # If there are no pinned specs, but the origins are different, + # we need to create a new spec with the combined origin. + return specs[0]._replace(origin=combined_origin) return specs[0] From 54e7a7704c182227179bc1300baac9730410bfec Mon Sep 17 00:00:00 2001 From: Bas Nijholt Date: Sat, 8 Feb 2025 14:04:08 -0800 Subject: [PATCH 32/39] rename --- unidep/_conda_env.py | 4 ++-- unidep/_conflicts.py | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/unidep/_conda_env.py b/unidep/_conda_env.py index 6f08448b..4239f247 100644 --- a/unidep/_conda_env.py +++ b/unidep/_conda_env.py @@ -15,7 +15,7 @@ from unidep._conflicts import ( VersionConflictError, - _maybe_new_spec_with_combined_pinnings, + _maybe_new_spec_with_combined_pinnings_and_origins, ) from unidep.platform_definitions import ( PLATFORM_SELECTOR_MAP, @@ -111,7 +111,7 @@ def _resolve_multiple_platform_conflicts( specs, (first_platform, *_) = zip(*spec_to_platforms.items()) first, *others = specs try: - spec = _maybe_new_spec_with_combined_pinnings(specs) # type: ignore[arg-type] + spec = _maybe_new_spec_with_combined_pinnings_and_origins(specs) # type: ignore[arg-type] except VersionConflictError: # We have a conflict, select the first one. 
msg = ( diff --git a/unidep/_conflicts.py b/unidep/_conflicts.py index 42272d37..9d529e13 100644 --- a/unidep/_conflicts.py +++ b/unidep/_conflicts.py @@ -78,7 +78,7 @@ def _pop_unused_platforms_and_maybe_expand_none( platform_data.pop(_platform) -def _maybe_new_spec_with_combined_pinnings( +def _maybe_new_spec_with_combined_pinnings_and_origins( specs: list[Spec], ) -> Spec: pinned_specs = [m for m in specs if m.pin is not None] @@ -118,7 +118,7 @@ def _combine_pinning_within_platform( for _platform, packages in data.items(): reduced_data[_platform] = {} for which, specs in packages.items(): - spec = _maybe_new_spec_with_combined_pinnings(specs) + spec = _maybe_new_spec_with_combined_pinnings_and_origins(specs) reduced_data[_platform][which] = spec return reduced_data From ac5cda9d9085b359136c5e14ce8ce7628a023000 Mon Sep 17 00:00:00 2001 From: Bas Nijholt Date: Sat, 8 Feb 2025 14:25:18 -0800 Subject: [PATCH 33/39] Define as features --- unidep/_pixi.py | 131 ++++++++++++++++++++++++++++++++++-------------- 1 file changed, 92 insertions(+), 39 deletions(-) diff --git a/unidep/_pixi.py b/unidep/_pixi.py index 84ae34e8..bb780931 100644 --- a/unidep/_pixi.py +++ b/unidep/_pixi.py @@ -60,48 +60,101 @@ def _initialize_pixi_data( return pixi_data -def _process_dependencies( +def group_by_origin( + resolved_deps: dict[str, dict[Platform | None, dict[CondaPip, Spec]]], +) -> dict[Path, dict[str, dict[Platform | None, dict[CondaPip, Spec]]]]: + groups: dict[Path, dict[str, dict[Platform | None, dict[CondaPip, Spec]]]] = {} + for pkg_name, platform_map in resolved_deps.items(): + for plat, manager_map in platform_map.items(): + for manager, spec in manager_map.items(): + for origin in spec.origin: + # Normalize origin to a Path object + origin_path = Path(origin) + groups.setdefault(origin_path, {}) + groups[origin_path].setdefault(pkg_name, {}) + groups[origin_path][pkg_name].setdefault(plat, {}) + groups[origin_path][pkg_name][plat][manager] = spec + return groups + + +def _process_dependencies( # noqa: PLR0912 pixi_data: dict[str, dict[str, Any]], resolved_dependencies: dict[str, dict[Platform | None, dict[CondaPip, Spec]]], ) -> None: - # Extract conda and pip dependencies - conda_deps, pip_deps = _extract_conda_pip_dependencies(resolved_dependencies) - - # Process conda dependencies - for pkg_name, platform_to_spec in conda_deps.items(): - for _platform, spec in platform_to_spec.items(): - pin = spec.pin or "*" - if _platform is None: - # Applies to all platforms - pixi_data["dependencies"][pkg_name] = pin - else: - # Platform-specific dependency - # Ensure target section exists - target = pixi_data["target"].setdefault(_platform, {}) - deps = target.setdefault("dependencies", {}) - deps[pkg_name] = pin - - # Process pip dependencies - for pkg_name, platform_to_spec in pip_deps.items(): - for _platform, spec in platform_to_spec.items(): - pin = spec.pin or "*" - if _platform is None: - # Applies to all platforms - pixi_data["pypi-dependencies"][pkg_name] = pin - else: - # Platform-specific dependency - # Ensure target section exists - target = pixi_data["target"].setdefault(_platform, {}) - deps = target.setdefault("pypi-dependencies", {}) - deps[pkg_name] = pin - - # Remove empty sections if necessary - if not pixi_data["dependencies"]: - del pixi_data["dependencies"] - if not pixi_data["pypi-dependencies"]: - del pixi_data["pypi-dependencies"] - if not pixi_data["target"]: - del pixi_data["target"] + """Process the resolved dependencies and update the pixi manifest data. 
+ + This function first groups the resolved dependencies by origin (using + group_by_origin) and then creates a separate feature (under the "feature" + key in pixi_data) for each origin. The feature name is derived using the + parent directory's stem of the origin file. + + After creating the per-origin features, if the manifest does not yet have an + "environments" table, we automatically add one with: + - a "default" environment that includes all features, and + - one environment per feature (with the feature name as the sole member). + """ + # --- Step 1: Group by origin and create per-origin features --- + origin_groups = group_by_origin(resolved_dependencies) + features = pixi_data.setdefault("feature", {}) + + for origin_path, group_deps in origin_groups.items(): + # Derive a feature name from the parent folder of the origin file. + feature_name = origin_path.resolve().parent.stem + + # Initialize the feature entry. + feature_entry: dict[str, Any] = { + "dependencies": {}, + "pypi-dependencies": {}, + "target": {}, + } + + # Extract conda and pip dependencies from the grouped data. + group_conda, group_pip = _extract_conda_pip_dependencies(group_deps) + + # Process conda dependencies for this feature. + for pkg_name, platform_to_spec in group_conda.items(): + for _platform, spec in platform_to_spec.items(): + pin = spec.pin or "*" + if _platform is None: + feature_entry["dependencies"][pkg_name] = pin + else: + target = feature_entry["target"].setdefault(_platform, {}) + deps = target.setdefault("dependencies", {}) + deps[pkg_name] = pin + + # Process pip dependencies for this feature. + for pkg_name, platform_to_spec in group_pip.items(): + for _platform, spec in platform_to_spec.items(): + pin = spec.pin or "*" + if _platform is None: + feature_entry["pypi-dependencies"][pkg_name] = pin + else: + target = feature_entry["target"].setdefault(_platform, {}) + deps = target.setdefault("pypi-dependencies", {}) + deps[pkg_name] = pin + + # Remove empty sections. + if not feature_entry["dependencies"]: + del feature_entry["dependencies"] + if not feature_entry["pypi-dependencies"]: + del feature_entry["pypi-dependencies"] + if not feature_entry["target"]: + del feature_entry["target"] + + # Save this feature entry. + features[feature_name] = feature_entry + + # --- Step 2: Automatically add the environments table if not already defined --- + if "environments" not in pixi_data: + all_features = list(features.keys()) + pixi_data["environments"] = {} + # The "default" environment will include all features. + pixi_data["environments"]["default"] = all_features + # Also create one environment per feature. 
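+        # Illustrative end result for features "project1" and "project2"
+        # (cf. tests/simple_monorepo below):
+        #   {"default": ["project1", "project2"],
+        #    "project1": ["project1"], "project2": ["project2"]}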
+ for feat in all_features: + # Environment names cannot use _, only lowercase letters, digits, and - + name = feat.replace("_", "-") + pixi_data["environments"][name] = [feat] def _write_pixi_toml( From cff66b4153f6f923842b8354fb9797c3589d046c Mon Sep 17 00:00:00 2001 From: Bas Nijholt Date: Sat, 8 Feb 2025 14:39:25 -0800 Subject: [PATCH 34/39] new locking mechanism --- tests/simple_monorepo/pixi.lock | 22 ++ tests/simple_monorepo/pixi.toml | 28 +- tests/simple_monorepo/project1/pixi.lock | 84 +----- tests/simple_monorepo/project2/pixi.lock | 62 +---- unidep/_pixi_lock.py | 324 ++++++++--------------- 5 files changed, 179 insertions(+), 341 deletions(-) diff --git a/tests/simple_monorepo/pixi.lock b/tests/simple_monorepo/pixi.lock index 7001ae43..6b535804 100644 --- a/tests/simple_monorepo/pixi.lock +++ b/tests/simple_monorepo/pixi.lock @@ -20,6 +20,28 @@ environments: - conda: https://conda.anaconda.org/conda-forge/osx-arm64/bzip2-1.0.8-h99b78c6_7.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/python_abi-3.13-5_cp313t.conda - conda: https://conda.anaconda.org/conda-forge/noarch/tzdata-2025a-h78e105d_0.conda + project1: + channels: + - url: https://conda.anaconda.org/conda-forge/ + packages: + osx-64: + - conda: https://conda.anaconda.org/conda-forge/osx-64/bzip2-1.0.8-hfdf4475_7.conda + osx-arm64: + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/bzip2-1.0.8-h99b78c6_7.conda + project2: + channels: + - url: https://conda.anaconda.org/conda-forge/ + packages: + osx-arm64: + - conda: https://conda.anaconda.org/conda-forge/noarch/tzdata-2025a-h78e105d_0.conda + simple-monorepo: + channels: + - url: https://conda.anaconda.org/conda-forge/ + packages: + osx-64: + - conda: https://conda.anaconda.org/conda-forge/osx-64/python_abi-3.13-5_cp313t.conda + osx-arm64: + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/python_abi-3.13-5_cp313t.conda packages: - conda: https://conda.anaconda.org/conda-forge/osx-64/bzip2-1.0.8-hfdf4475_7.conda sha256: cad153608b81fb24fc8c509357daa9ae4e49dfc535b2cb49b91e23dbd68fc3c5 diff --git a/tests/simple_monorepo/pixi.toml b/tests/simple_monorepo/pixi.toml index 5cbdb7a5..7b3d2dee 100644 --- a/tests/simple_monorepo/pixi.toml +++ b/tests/simple_monorepo/pixi.toml @@ -9,8 +9,32 @@ channels = [ ] [dependencies] + +[pypi-dependencies] + +[target] + +[feature.project1.dependencies] bzip2 = "*" -python_abi = "*" -[target.osx-arm64.dependencies] +[feature.project2.target.osx-arm64.dependencies] tzdata = "*" + +[feature.simple_monorepo.dependencies] +python_abi = "*" + +[environments] +default = [ + "project1", + "project2", + "simple_monorepo", +] +project1 = [ + "project1", +] +project2 = [ + "project2", +] +simple-monorepo = [ + "simple_monorepo", +] diff --git a/tests/simple_monorepo/project1/pixi.lock b/tests/simple_monorepo/project1/pixi.lock index 7256091e..b5b303ec 100644 --- a/tests/simple_monorepo/project1/pixi.lock +++ b/tests/simple_monorepo/project1/pixi.lock @@ -1,12 +1,4 @@ -# This file is created and managed by `unidep` 0.63.2. -# For details see https://github.com/basnijholt/unidep -# File generated with: `unidep pixi-lock -d tests/simple_monorepo -p osx-64 -p osx-arm64` -# -# This environment can be installed with -# `pixi install` -# This file is a `pixi.lock` file generated via `unidep`. 
-# For details see https://pixi.sh/ - +version: 6 environments: default: channels: @@ -14,82 +6,28 @@ environments: packages: osx-64: - conda: https://conda.anaconda.org/conda-forge/osx-64/bzip2-1.0.8-hfdf4475_7.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/python_abi-3.13-5_cp313t.conda osx-arm64: - conda: https://conda.anaconda.org/conda-forge/osx-arm64/bzip2-1.0.8-h99b78c6_7.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/tzdata-2024b-hc8b5060_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/python_abi-3.13-5_cp313t.conda packages: -- build: hfdf4475_7 - build_number: 7 +- conda: https://conda.anaconda.org/conda-forge/osx-64/bzip2-1.0.8-hfdf4475_7.conda + sha256: cad153608b81fb24fc8c509357daa9ae4e49dfc535b2cb49b91e23dbd68fc3c5 + md5: 7ed4301d437b59045be7e051a0308211 depends: - __osx >=10.13 - kind: conda + arch: x86_64 + platform: osx license: bzip2-1.0.6 license_family: BSD - md5: 7ed4301d437b59045be7e051a0308211 - name: bzip2 - sha256: cad153608b81fb24fc8c509357daa9ae4e49dfc535b2cb49b91e23dbd68fc3c5 size: 134188 - subdir: osx-64 timestamp: 1720974491916 - url: https://conda.anaconda.org/conda-forge/osx-64/bzip2-1.0.8-hfdf4475_7.conda - version: 1.0.8 -- build: h99b78c6_7 - build_number: 7 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/bzip2-1.0.8-h99b78c6_7.conda + sha256: adfa71f158cbd872a36394c56c3568e6034aa55c623634b37a4836bd036e6b91 + md5: fc6948412dbbbe9a4c9ddbbcfe0a79ab depends: - __osx >=11.0 - kind: conda + arch: arm64 + platform: osx license: bzip2-1.0.6 license_family: BSD - md5: fc6948412dbbbe9a4c9ddbbcfe0a79ab - name: bzip2 - sha256: adfa71f158cbd872a36394c56c3568e6034aa55c623634b37a4836bd036e6b91 size: 122909 - subdir: osx-arm64 timestamp: 1720974522888 - url: https://conda.anaconda.org/conda-forge/osx-arm64/bzip2-1.0.8-h99b78c6_7.conda - version: 1.0.8 -- build: hc8b5060_0 - kind: conda - license: LicenseRef-Public-Domain - md5: 8ac3367aafb1cc0a068483c580af8015 - name: tzdata - noarch: generic - sha256: 4fde5c3008bf5d2db82f2b50204464314cc3c91c1d953652f7bd01d9e52aefdf - size: 122354 - subdir: noarch - timestamp: 1728047496079 - url: https://conda.anaconda.org/conda-forge/noarch/tzdata-2024b-hc8b5060_0.conda - version: 2024b -- build: 5_cp313t - build_number: 5 - constrains: - - python 3.13.* *_cp313t - kind: conda - license: BSD-3-Clause - license_family: BSD - md5: 32ba8fc57ccb0b48dd6006974f65c525 - name: python_abi - sha256: a96553de64be6441400e88c2c6ad7123d91cbcea4898b5966a653163f30d9f55 - size: 6300 - subdir: osx-64 - timestamp: 1723823108577 - url: https://conda.anaconda.org/conda-forge/osx-64/python_abi-3.13-5_cp313t.conda - version: '3.13' -- build: 5_cp313t - build_number: 5 - constrains: - - python 3.13.* *_cp313t - kind: conda - license: BSD-3-Clause - license_family: BSD - md5: 24a9a05eba65586da53ad7b56a06dc02 - name: python_abi - sha256: 2165466ff175e1890b66d079d64449a1b6dd9873fb0f5e977839ccc4639b813b - size: 6317 - subdir: osx-arm64 - timestamp: 1723823118660 - url: https://conda.anaconda.org/conda-forge/osx-arm64/python_abi-3.13-5_cp313t.conda - version: '3.13' -version: 5 diff --git a/tests/simple_monorepo/project2/pixi.lock b/tests/simple_monorepo/project2/pixi.lock index 64346111..0576373b 100644 --- a/tests/simple_monorepo/project2/pixi.lock +++ b/tests/simple_monorepo/project2/pixi.lock @@ -1,63 +1,15 @@ -# This file is created and managed by `unidep` 0.63.2. 
-# For details see https://github.com/basnijholt/unidep -# File generated with: `unidep pixi-lock -d tests/simple_monorepo -p osx-64 -p osx-arm64` -# -# This environment can be installed with -# `pixi install` -# This file is a `pixi.lock` file generated via `unidep`. -# For details see https://pixi.sh/ - +version: 6 environments: default: channels: - url: https://conda.anaconda.org/conda-forge/ packages: - osx-64: - - conda: https://conda.anaconda.org/conda-forge/osx-64/python_abi-3.13-5_cp313t.conda osx-arm64: - - conda: https://conda.anaconda.org/conda-forge/noarch/tzdata-2024b-hc8b5060_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/python_abi-3.13-5_cp313t.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/tzdata-2025a-h78e105d_0.conda packages: -- build: hc8b5060_0 - kind: conda +- conda: https://conda.anaconda.org/conda-forge/noarch/tzdata-2025a-h78e105d_0.conda + sha256: c4b1ae8a2931fe9b274c44af29c5475a85b37693999f8c792dad0f8c6734b1de + md5: dbcace4706afdfb7eb891f7b37d07c04 license: LicenseRef-Public-Domain - md5: 8ac3367aafb1cc0a068483c580af8015 - name: tzdata - noarch: generic - sha256: 4fde5c3008bf5d2db82f2b50204464314cc3c91c1d953652f7bd01d9e52aefdf - size: 122354 - subdir: noarch - timestamp: 1728047496079 - url: https://conda.anaconda.org/conda-forge/noarch/tzdata-2024b-hc8b5060_0.conda - version: 2024b -- build: 5_cp313t - build_number: 5 - constrains: - - python 3.13.* *_cp313t - kind: conda - license: BSD-3-Clause - license_family: BSD - md5: 32ba8fc57ccb0b48dd6006974f65c525 - name: python_abi - sha256: a96553de64be6441400e88c2c6ad7123d91cbcea4898b5966a653163f30d9f55 - size: 6300 - subdir: osx-64 - timestamp: 1723823108577 - url: https://conda.anaconda.org/conda-forge/osx-64/python_abi-3.13-5_cp313t.conda - version: '3.13' -- build: 5_cp313t - build_number: 5 - constrains: - - python 3.13.* *_cp313t - kind: conda - license: BSD-3-Clause - license_family: BSD - md5: 24a9a05eba65586da53ad7b56a06dc02 - name: python_abi - sha256: 2165466ff175e1890b66d079d64449a1b6dd9873fb0f5e977839ccc4639b813b - size: 6317 - subdir: osx-arm64 - timestamp: 1723823118660 - url: https://conda.anaconda.org/conda-forge/osx-arm64/python_abi-3.13-5_cp313t.conda - version: '3.13' -version: 5 + size: 122921 + timestamp: 1737119101255 diff --git a/unidep/_pixi_lock.py b/unidep/_pixi_lock.py index a66655fb..409f0211 100644 --- a/unidep/_pixi_lock.py +++ b/unidep/_pixi_lock.py @@ -1,15 +1,13 @@ from __future__ import annotations -import re import shutil import subprocess import sys -from collections import defaultdict from typing import TYPE_CHECKING, Any, NamedTuple from ruamel.yaml import YAML -from unidep._dependencies_parsing import find_requirements_files, parse_requirements +from unidep._dependencies_parsing import find_requirements_files from unidep.utils import add_comment_to_file, change_directory if TYPE_CHECKING: @@ -112,197 +110,6 @@ class PixiLockSpec(NamedTuple): indexes: list[str] -def _filter_clean_deps(dependencies: list[str]) -> list[str]: - package_names = [] - for dep in dependencies: - # Split the dependency and the environment marker - if ";" in dep: - dep_part, marker_part = dep.split(";", 1) - marker_part = marker_part.strip() - else: - dep_part = dep - marker_part = "" - - # Skip if 'extra ==' is in the environment marker - if "extra ==" in marker_part: - continue - - # Extract the package name - dep_part = dep_part.strip() - package_name = re.split(r"[<>=!~\s]", dep_part)[0] - package_names.append(package_name) - - return package_names - - -def 
_parse_pixi_lock_packages( - pixi_lock_data: dict[str, Any], -) -> dict[str, PixiLockSpec]: - # Build a mapping from URL to package metadata - url_to_package = {pkg["url"]: pkg for pkg in pixi_lock_data.get("packages", [])} - lock_specs: dict[str, PixiLockSpec] = {} - environments = pixi_lock_data.get("environments", {}) - for env_name, env_data in environments.items(): - deps: dict[CondaPip, dict[Platform, dict[str, set[str]]]] = defaultdict( - lambda: defaultdict(lambda: defaultdict(set)), - ) - for platform, packages_dict in env_data.get("packages", {}).items(): - for manager_url in packages_dict: - for manager, url in manager_url.items(): - dep = url_to_package[url] - name = dep["name"] - depends = dep.get( - "depends" if manager == "conda" else "requires_dict", - [], - ) - deps[manager][platform][name].update(_filter_clean_deps(depends)) - - resolved: dict[CondaPip, dict[Platform, dict[str, set[str]]]] = {} - for manager, platforms in deps.items(): - resolved_manager = resolved.setdefault(manager, {}) - for _platform, pkgs in platforms.items(): - _resolved: dict[str, set[str]] = {} - for package in list(pkgs): - _recurse_pixi(package, _resolved, pkgs, set()) - resolved_manager[_platform] = _resolved - - packages: dict[tuple[CondaPip, Platform, str], list[dict[str, Any]]] = ( - defaultdict(list) - ) - for p in pixi_lock_data.get("packages", []): - # TODO: subdir is missing for pypi! This will cause issues - # later in the code. - key = (p["kind"], p.get("subdir"), p["name"]) - # Could be multiple entries for the same package, - # e.g., different wheels for different OS versions - packages[key].append(p) - - # Flatten the `dependencies` dict to same format as `packages` - dependencies = { - (which, platform, name): deps - for which, platforms in resolved.items() - for platform, pkgs in platforms.items() - for name, deps in pkgs.items() - } - lock_specs[env_name] = PixiLockSpec( - packages, - dependencies, - env_data.get("channels", []), - env_data.get("indexes", []), - ) - - return lock_specs - - -def _recurse_pixi( - package_name: str, - resolved: dict[str, set[str]], - dependencies: dict[str, set[str]], - seen: set[str], -) -> set[str]: - if package_name in resolved: - return resolved[package_name] - if package_name in seen: # Circular dependency detected - return set() - seen.add(package_name) - - all_deps = set(dependencies.get(package_name, [])) - for dep in dependencies.get(package_name, []): - all_deps.update(_recurse_pixi(dep, resolved, dependencies, seen)) - - resolved[package_name] = all_deps - seen.remove(package_name) - return all_deps - - -def _pixi_lock_subpackage( - *, - file: Path, - lock_spec: PixiLockSpec, - platforms: list[Platform], - yaml: YAML | None, -) -> Path: - requirements = parse_requirements(file) - locked_entries: dict[Platform, list[dict]] = defaultdict(list) - locked_packages: list[dict] = [] - locked_keys: set[tuple[CondaPip, Platform, str]] = set() - missing_keys: set[tuple[CondaPip, Platform, str]] = set() - - def add_package_with_dependencies( - which: CondaPip, - platform: Platform, - name: str, - ) -> None: - key: tuple[CondaPip, Platform, str] = (which, platform, name) - if key in locked_keys: - return - if key not in lock_spec.packages: - missing_keys.add(key) - return - pkg_infos = lock_spec.packages[key] - for pkg_info in pkg_infos: - # Add to locked_entries - locked_entries[platform].append({pkg_info["kind"]: pkg_info["url"]}) - # Add to locked_packages - locked_packages.append(pkg_info) - locked_keys.add(key) - # Recursively add dependencies - 
dependencies = lock_spec.dependencies.get(key, set()) - for dep_name in dependencies: - add_package_with_dependencies(which, platform, dep_name) - - for name, specs in requirements.requirements.items(): - if name.startswith("__"): - continue - for spec in specs: - _platforms = spec.platforms() - if _platforms is None: - _platforms = platforms - else: - _platforms = [p for p in _platforms if p in platforms] - - for _platform in _platforms: - add_package_with_dependencies(spec.which, _platform, name) - - if missing_keys: - print(f"โš ๏ธ Missing packages: {missing_keys}") - - # Generate subproject pixi.lock - pixi_lock_output = file.parent / "pixi.lock" - sub_lock_data = { - "version": 5, - "environments": { - "default": { - "channels": lock_spec.channels, - "indexes": lock_spec.indexes, - "packages": dict(locked_entries), - }, - }, - "packages": locked_packages, - } - - if yaml is None: - yaml = YAML(typ="rt") - yaml.default_flow_style = False - yaml.width = 4096 - yaml.representer.ignore_aliases = lambda *_: True # Disable anchors - - with pixi_lock_output.open("w") as fp: - yaml.dump(sub_lock_data, fp) - - add_comment_to_file( - pixi_lock_output, - extra_lines=[ - "#", - "# This environment can be installed with", - "# `pixi install`", - "# This file is a `pixi.lock` file generated via `unidep`.", - "# For details see https://pixi.sh/", - ], - ) - return pixi_lock_output - - def _check_consistent_lock_files( global_lock_file: Path, sub_lock_files: list[Path], @@ -342,12 +149,79 @@ def _check_consistent_lock_files( return mismatches +def _generate_sub_lock_file( + feature_name: str, + global_lock_data: dict[str, any], + yaml_obj: YAML, + output_dir: Path, +) -> Path: + """Generate a sub-lock file for a given feature. + + Parameters + ---------- + - feature_name: The name of the feature (derived from the parent folderโ€™s stem). + - global_lock_data: The global lock file data as a dict. + - yaml_obj: A ruamel.yaml YAML instance for dumping. + - output_dir: The directory where the sublock file should be written. + + Returns + ------- + - The Path to the newly written sub-lock file. + + The new lock file will contain a single environment ("default") whose contents + are exactly the environment for the given feature in the global lock file. It + also includes only the package entries from the global "packages" list that are + used by that environment. + + """ + # Look up the environment for the given feature. + envs = global_lock_data.get("environments", {}) + env_data = envs.get(feature_name) + if env_data is None: + raise ValueError(f"Feature '{feature_name}' not found in the global lock file.") + + # Create a new lock dictionary with version and a single environment renamed "default". + new_lock = { + "version": global_lock_data.get("version"), + "environments": {"default": env_data}, + } + + # Collect all URLs from the environmentโ€™s package list. + used_urls = set() + # The environment data is expected to have a "packages" key mapping each platform + # to a list of package entry dicts. + env_packages = env_data.get("packages", {}) + for platform, pkg_list in env_packages.items(): + for pkg_entry in pkg_list: + # Assume each pkg_entry is a dict with one key: either "conda" or "pypi" + for _, url in pkg_entry.items(): + used_urls.add(url) + + # Filter the global packages list to include only those entries used in this environment. 
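+    # Each global package entry carries its manager and URL, e.g.
+    # {"conda": "https://conda.anaconda.org/conda-forge/..."} plus metadata,
+    # so an entry is kept when either URL occurs in `used_urls`.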
+ global_packages = global_lock_data.get("packages", []) + filtered_packages = [] + for pkg in global_packages: + # Check if either the value under "conda" or "pypi" is in used_urls. + if (pkg.get("conda") in used_urls) or (pkg.get("pypi") in used_urls): + filtered_packages.append(pkg) + new_lock["packages"] = filtered_packages + + # Write the new lock file into output_dir as "pixi.lock" + output_file = output_dir / "pixi.lock" + with output_file.open("w") as f: + yaml_obj.dump(new_lock, f) + return output_file + + +# Updated pixi_lock_command def pixi_lock_command( *, depth: int, directory: Path, files: list[Path] | None, - platforms: list[Platform], + platforms: list[ + any + ], # Platform type (import from unidep.platform_definitions if needed) verbose: bool, only_global: bool, ignore_pins: list[str], @@ -355,14 +229,24 @@ def pixi_lock_command( overwrite_pins: list[str], extra_flags: list[str], ) -> None: - """Generate a pixi.lock file for a collection of dependencies.""" + """Generate a pixi.lock file for a collection of dependencies. + + This command first creates a global lock file (using _pixi_lock_global). + Then, if neither only_global is True nor specific files were passed, it scans + for requirements files in subdirectories. For each such file, it derives a + feature name from the parent directoryโ€™s stem and generates a sub-lock file + that contains a single environment called "default" built from the corresponding + environment in the global lock file. + """ + # Process extra flags (assume they are prefixed with "--") if extra_flags: assert extra_flags[0] == "--" extra_flags = extra_flags[1:] if verbose: print(f"๐Ÿ“ Extra flags for `pixi lock`: {extra_flags}") - pixi_lock_output = _pixi_lock_global( + # Step 1: Generate the global lock file. + global_lock_file = _pixi_lock_global( depth=depth, directory=directory, files=files, @@ -373,29 +257,47 @@ def pixi_lock_command( skip_dependencies=skip_dependencies, extra_flags=extra_flags, ) + # If only_global is True or specific files were provided, do not generate sublock files. if only_global or files: return - with YAML(typ="safe") as yaml, pixi_lock_output.open() as fp: - global_lock_data = yaml.load(fp) + # Step 2: Load the global lock file. + yaml_obj = YAML(typ="rt") + with global_lock_file.open() as fp: + global_lock_data = yaml_obj.load(fp) - lock_specs = _parse_pixi_lock_packages(global_lock_data)["default"] - sub_lock_files = [] + # Step 3: Find all requirements files in subdirectories. found_files = find_requirements_files(directory, depth) - for file in found_files: - if file.parent == directory: + sub_lock_files = [] + for req_file in found_files: + # Skip files in the root directory. + if req_file.parent == directory: continue - sublock_file = _pixi_lock_subpackage( - file=file, - lock_spec=lock_specs, - platforms=platforms, - yaml=yaml, - ) - print(f"๐Ÿ“ Generated lock file for `{file}`: `{sublock_file}`") + + # Derive feature name from the parent directory's stem. 
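+        # e.g. tests/simple_monorepo/project1/requirements.yaml maps to the
+        # feature "project1", matching the names `_process_dependencies`
+        # derives from each origin's parent directory.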
+        feature_name = req_file.resolve().parent.stem
+        if verbose:
+            print(
+                f"🔍 Processing sublock for feature '{feature_name}' from file: {req_file}",
+            )
+        try:
+            sublock_file = _generate_sub_lock_file(
+                feature_name=feature_name,
+                global_lock_data=global_lock_data,
+                yaml_obj=yaml_obj,
+                output_dir=req_file.parent,
+            )
+        except Exception as e:  # noqa: BLE001
+            print(
+                f"⚠️ Error generating sublock for feature '{feature_name}' from {req_file}: {e}",
+            )
+            continue
+        print(f"📝 Generated sublock file for '{req_file}': {sublock_file}")
         sub_lock_files.append(sublock_file)
 
+    # Step 4: Check consistency between the global and the sublock files.
     mismatches = _check_consistent_lock_files(
-        global_lock_file=pixi_lock_output,
+        global_lock_file=global_lock_file,
         sub_lock_files=sub_lock_files,
     )
     if not mismatches:

From 116be20d09acb776b5e1d7352d30eb68f6210ba0 Mon Sep 17 00:00:00 2001
From: Bas Nijholt
Date: Sat, 8 Feb 2025 14:47:02 -0800
Subject: [PATCH 35/39] fixes

---
 unidep/_pixi_lock.py | 66 +++++++++++++++++++++-----------------
 1 file changed, 32 insertions(+), 34 deletions(-)

diff --git a/unidep/_pixi_lock.py b/unidep/_pixi_lock.py
index 409f0211..fa95ebf8 100644
--- a/unidep/_pixi_lock.py
+++ b/unidep/_pixi_lock.py
@@ -151,7 +151,7 @@ def _check_consistent_lock_files(
 
 def _generate_sub_lock_file(
     feature_name: str,
-    global_lock_data: dict[str, any],
+    global_lock_data: dict[str, Any],
     yaml_obj: YAML,
     output_dir: Path,
 ) -> Path:
@@ -159,10 +159,14 @@
     Parameters
     ----------
-    - feature_name: The name of the feature (derived from the parent folder’s stem).
-    - global_lock_data: The global lock file data as a dict.
-    - yaml_obj: A ruamel.yaml YAML instance for dumping.
-    - output_dir: The directory where the sublock file should be written.
+    feature_name
+        The name of the feature (derived from the parent folder's stem).
+    global_lock_data
+        The global lock file data as a dict.
+    yaml_obj
+        A ruamel.yaml YAML instance for dumping.
+    output_dir
+        The directory where the sublock file should be written.
 
     Returns
     -------
     - The Path to the newly written sub-lock file.
@@ -178,32 +182,33 @@
     envs = global_lock_data.get("environments", {})
     env_data = envs.get(feature_name)
     if env_data is None:
-        raise ValueError(f"Feature '{feature_name}' not found in the global lock file.")
+        msg = f"Feature '{feature_name}' not found in the global lock file."
global_packages = global_lock_data.get("packages", []) - filtered_packages = [] - for pkg in global_packages: - # Check if either the value under "conda" or "pypi" is in used_urls. - if (pkg.get("conda") in used_urls) or (pkg.get("pypi") in used_urls): - filtered_packages.append(pkg) + filtered_packages = [ + pkg + for pkg in global_packages + if (pkg.get("conda") in used_urls) or (pkg.get("pypi") in used_urls) + ] new_lock["packages"] = filtered_packages # Write the new lock file into output_dir as "pixi.lock" @@ -219,9 +224,7 @@ def pixi_lock_command( depth: int, directory: Path, files: list[Path] | None, - platforms: list[ - any - ], # Platform type (import from unidep.platform_definitions if needed) + platforms: list[Platform], verbose: bool, only_global: bool, ignore_pins: list[str], @@ -234,7 +237,7 @@ def pixi_lock_command( This command first creates a global lock file (using _pixi_lock_global). Then, if neither only_global is True nor specific files were passed, it scans for requirements files in subdirectories. For each such file, it derives a - feature name from the parent directoryโ€™s stem and generates a sub-lock file + feature name from the parent directory's stem and generates a sub-lock file that contains a single environment called "default" built from the corresponding environment in the global lock file. """ @@ -257,7 +260,7 @@ def pixi_lock_command( skip_dependencies=skip_dependencies, extra_flags=extra_flags, ) - # If only_global is True or specific files were provided, do not generate sublock files. + # If only_global or specific files were provided, do not generate sublock files. if only_global or files: return @@ -278,20 +281,15 @@ def pixi_lock_command( feature_name = req_file.resolve().parent.stem if verbose: print( - f"๐Ÿ” Processing sublock for feature '{feature_name}' from file: {req_file}", - ) - try: - sublock_file = _generate_sub_lock_file( - feature_name=feature_name, - global_lock_data=global_lock_data, - yaml_obj=yaml_obj, - output_dir=req_file.parent, - ) - except Exception as e: # noqa: BLE001 - print( - f"โš ๏ธ Error generating sublock for feature '{feature_name}' from {req_file}: {e}", + f"๐Ÿ” Processing sublock for feature '{feature_name}' from file: {req_file}", # noqa: E501, ) - continue + sublock_file = _generate_sub_lock_file( + feature_name=feature_name, + global_lock_data=global_lock_data, + yaml_obj=yaml_obj, + output_dir=req_file.parent, + ) + print(f"๐Ÿ“ Generated sublock file for '{req_file}': {sublock_file}") sub_lock_files.append(sublock_file) From 8edd55c17422f5fe6c2ad582dba436e13664621c Mon Sep 17 00:00:00 2001 From: Bas Nijholt Date: Sat, 8 Feb 2025 15:01:56 -0800 Subject: [PATCH 36/39] format --- unidep/_pixi.py | 14 +++++++++++--- unidep/_pixi_lock.py | 1 - 2 files changed, 11 insertions(+), 4 deletions(-) diff --git a/unidep/_pixi.py b/unidep/_pixi.py index bb780931..3a97ceff 100644 --- a/unidep/_pixi.py +++ b/unidep/_pixi.py @@ -60,7 +60,14 @@ def _initialize_pixi_data( return pixi_data -def group_by_origin( +def _format_pin(pin: str) -> Any: + parts = pin.split() + if len(parts) == 2: # noqa: PLR2004 + return {"version": parts[0], "build": parts[1]} + return pin + + +def _group_by_origin( resolved_deps: dict[str, dict[Platform | None, dict[CondaPip, Spec]]], ) -> dict[Path, dict[str, dict[Platform | None, dict[CondaPip, Spec]]]]: groups: dict[Path, dict[str, dict[Platform | None, dict[CondaPip, Spec]]]] = {} @@ -84,7 +91,7 @@ def _process_dependencies( # noqa: PLR0912 """Process the resolved dependencies and update the pixi 
manifest data.
 
     This function first groups the resolved dependencies by origin (using
-    group_by_origin) and then creates a separate feature (under the "feature"
+    _group_by_origin) and then creates a separate feature (under the "feature"
     key in pixi_data) for each origin. The feature name is derived using the
     parent directory's stem of the origin file.
@@ -94,7 +101,7 @@ def _process_dependencies(  # noqa: PLR0912
     - one environment per feature (with the feature name as the sole member).
     """
     # --- Step 1: Group by origin and create per-origin features ---
-    origin_groups = group_by_origin(resolved_dependencies)
+    origin_groups = _group_by_origin(resolved_dependencies)
     features = pixi_data.setdefault("feature", {})
 
     for origin_path, group_deps in origin_groups.items():
@@ -115,6 +122,7 @@
         for pkg_name, platform_to_spec in group_conda.items():
             for _platform, spec in platform_to_spec.items():
                 pin = spec.pin or "*"
+                pin = _format_pin(pin)
                 if _platform is None:
                     feature_entry["dependencies"][pkg_name] = pin
                 else:
diff --git a/unidep/_pixi_lock.py b/unidep/_pixi_lock.py
index fa95ebf8..2a564c19 100644
--- a/unidep/_pixi_lock.py
+++ b/unidep/_pixi_lock.py
@@ -218,7 +218,6 @@
     return output_file
 
 
-# Updated pixi_lock_command
 def pixi_lock_command(
     *,
     depth: int,

From 8c18dc56870e30a528bffb07c13ba7bd5c990188 Mon Sep 17 00:00:00 2001
From: Bas Nijholt
Date: Sat, 8 Feb 2025 15:06:37 -0800
Subject: [PATCH 37/39] convert name

---
 unidep/_pixi.py      | 7 ++++++-
 unidep/_pixi_lock.py | 4 +++-
 2 files changed, 9 insertions(+), 2 deletions(-)

diff --git a/unidep/_pixi.py b/unidep/_pixi.py
index 3a97ceff..325ad620 100644
--- a/unidep/_pixi.py
+++ b/unidep/_pixi.py
@@ -161,10 +161,15 @@
     # Also create one environment per feature.
     for feat in all_features:
         # Environment names cannot use _, only lowercase letters, digits, and -
-        name = feat.replace("_", "-")
+        name = feature_name_to_env_name(feat)
         pixi_data["environments"][name] = [feat]
 
 
+def feature_name_to_env_name(feature_name: str) -> str:
+    """Convert a feature name to a valid environment name."""
+    return feature_name.replace("_", "-")
+
+
 def _write_pixi_toml(
     pixi_data: dict[str, dict[str, Any]],
     output_file: str | Path | None,
diff --git a/unidep/_pixi_lock.py b/unidep/_pixi_lock.py
index 2a564c19..0e452f94 100644
--- a/unidep/_pixi_lock.py
+++ b/unidep/_pixi_lock.py
@@ -8,6 +8,7 @@
 from ruamel.yaml import YAML
 
 from unidep._dependencies_parsing import find_requirements_files
+from unidep._pixi import feature_name_to_env_name
 from unidep.utils import add_comment_to_file, change_directory
 
 if TYPE_CHECKING:
@@ -180,7 +181,8 @@
     """
     # Look up the environment for the given feature.
     envs = global_lock_data.get("environments", {})
-    env_data = envs.get(feature_name)
+    env_name = feature_name_to_env_name(feature_name)
+    env_data = envs.get(env_name)
     if env_data is None:
         msg = f"Feature '{feature_name}' not found in the global lock file."
         raise ValueError(msg)

From b7d701ff2af6718b0af0490c5a6dd408844af41f Mon Sep 17 00:00:00 2001
From: Bas Nijholt
Date: Sat, 8 Feb 2025 15:22:21 -0800
Subject: [PATCH 38/39] original origin

---
 unidep/_dependencies_parsing.py | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/unidep/_dependencies_parsing.py b/unidep/_dependencies_parsing.py
index 8972901e..03334ae5 100644
--- a/unidep/_dependencies_parsing.py
+++ b/unidep/_dependencies_parsing.py
@@ -267,12 +267,13 @@ def _update_data_structures(
     seen: set[PathWithExtras],  # modified in place
     yaml: YAML,
     is_nested: bool,
+    origin: Path | None = None,
     verbose: bool = False,
 ) -> None:
     if verbose:
         print(f"📄 Parsing `{path_with_extras.path_with_extras}`")
     data = _load(path_with_extras.path, yaml)
-    data["_origin"] = path_with_extras.path
+    data["_origin"] = origin or path_with_extras.path
     datas.append(data)
     _move_local_optional_dependencies_to_local_dependencies(
         data=data,  # modified in place
@@ -413,6 +414,7 @@ def _add_local_dependencies(
         yaml=yaml,
         verbose=verbose,
         is_nested=True,
+        origin=path_with_extras.path,
     )

From b7d792f65750e40087ea01cc6f8b75aefd6e9f52 Mon Sep 17 00:00:00 2001
From: Bas Nijholt
Date: Sat, 8 Feb 2025 20:50:10 -0800
Subject: [PATCH 39/39] always pass

---
 unidep/_dependencies_parsing.py | 9 ++++++---
 1 file changed, 6 insertions(+), 3 deletions(-)

diff --git a/unidep/_dependencies_parsing.py b/unidep/_dependencies_parsing.py
index 03334ae5..683ac5fc 100644
--- a/unidep/_dependencies_parsing.py
+++ b/unidep/_dependencies_parsing.py
@@ -267,13 +267,13 @@ def _update_data_structures(
     seen: set[PathWithExtras],  # modified in place
     yaml: YAML,
     is_nested: bool,
-    origin: Path | None = None,
+    origin: Path,
     verbose: bool = False,
 ) -> None:
     if verbose:
         print(f"📄 Parsing `{path_with_extras.path_with_extras}`")
     data = _load(path_with_extras.path, yaml)
-    data["_origin"] = origin or path_with_extras.path
+    data["_origin"] = origin
     datas.append(data)
     _move_local_optional_dependencies_to_local_dependencies(
         data=data,  # modified in place
@@ -307,6 +307,7 @@ def _update_data_structures(
         all_extras=all_extras,  # modified in place
         seen=seen,  # modified in place
         yaml=yaml,
+        origin=origin,
         verbose=verbose,
     )

@@ -383,6 +384,7 @@ def _add_local_dependencies(
     all_extras: list[list[str]],
     seen: set[PathWithExtras],
     yaml: YAML,
+    origin: Path,
     verbose: bool = False,
 ) -> None:
     try:
@@ -414,7 +416,7 @@ def _add_local_dependencies(
         yaml=yaml,
         verbose=verbose,
         is_nested=True,
-        origin=path_with_extras.path,
+        origin=origin,
     )

@@ -466,6 +468,7 @@ def parse_requirements(
         yaml=yaml,
         verbose=verbose,
         is_nested=False,
+        origin=path_with_extras.path,
     )
 
     assert len(datas) == len(all_extras)
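
For reference, the sub-lock extraction these patches converge on boils down to the
following minimal, self-contained sketch. The lock data here is hypothetical sample
input (a real `pixi.lock` carries more metadata per package), and
`feature_name_to_env_name` is restated inline so the snippet runs with only
`ruamel.yaml` installed:

    from pathlib import Path

    from ruamel.yaml import YAML


    def feature_name_to_env_name(feature_name: str) -> str:
        # Mirrors the helper added in PATCH 37: pixi environment names may not use "_".
        return feature_name.replace("_", "-")


    # Hypothetical global lock data with one feature environment.
    global_lock_data = {
        "version": 5,
        "environments": {
            "project-1": {
                "channels": [{"url": "https://conda.anaconda.org/conda-forge/"}],
                "packages": {
                    "linux-64": [
                        {"conda": "https://conda.anaconda.org/conda-forge/linux-64/foo-1.0-0.conda"},
                    ],
                },
            },
        },
        "packages": [
            {"conda": "https://conda.anaconda.org/conda-forge/linux-64/foo-1.0-0.conda"},
            # Not referenced by "project-1", so it is filtered out below.
            {"conda": "https://conda.anaconda.org/conda-forge/linux-64/bar-2.0-0.conda"},
        ],
    }

    env = global_lock_data["environments"][feature_name_to_env_name("project_1")]
    # Every package URL the environment references, on any platform.
    used_urls = {
        url
        for pkg_list in env["packages"].values()
        for entry in pkg_list
        for url in entry.values()
    }
    sub_lock = {
        "version": global_lock_data["version"],
        # Rename the feature environment to "default" so a bare `pixi install`
        # works inside the subproject directory.
        "environments": {"default": env},
        "packages": [
            pkg
            for pkg in global_lock_data["packages"]
            if pkg.get("conda") in used_urls or pkg.get("pypi") in used_urls
        ],
    }
    yaml = YAML(typ="rt")
    with Path("pixi.lock").open("w") as f:
        yaml.dump(sub_lock, f)

The design point: the global lock file is resolved once, and each subproject's
`pixi.lock` is a pure projection of it, which is what lets
`_check_consistent_lock_files` compare the global and sub-lock files afterwards
without re-resolving anything.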