diff --git a/.github/workflows/sdk.yml b/.github/workflows/sdk.yml index 0f3a7a194bc..f35a28b3504 100644 --- a/.github/workflows/sdk.yml +++ b/.github/workflows/sdk.yml @@ -6,7 +6,7 @@ on: pull_request: {} jobs: - sdks: + typescript-sdk: runs-on: ubuntu-latest timeout-minutes: 10 steps: @@ -41,3 +41,40 @@ jobs: - name: Test SDK packages run: pnpm -r --filter ./sdk/typescript run test + + python-sdk: + runs-on: ubuntu-latest + timeout-minutes: 15 + steps: + - name: Checkout repository + uses: actions/checkout@v5 + + - uses: dtolnay/rust-toolchain@1.90 + + - name: Build codex + run: cargo build --bin codex + working-directory: codex-rs + + - name: Install uv + uses: astral-sh/setup-uv@v6 + with: + python-version: "3.12" + enable-cache: true + cache-dependency-glob: | + sdk/python/pyproject.toml + sdk/python/uv.lock + + - name: Install Python runtime + run: uv python install 3.12 + + - name: Sync Python dependencies + run: uv sync --project sdk/python --extra dev + + - name: Lint Python SDK + run: uv run --project sdk/python ruff check + + - name: Type-check Python SDK + run: uv run --project sdk/python mypy --config-file sdk/python/pyproject.toml sdk/python/src/codex + + - name: Test Python SDK + run: uv run --project sdk/python pytest diff --git a/.gitignore b/.gitignore index 178239c0a0c..08dc176eee1 100644 --- a/.gitignore +++ b/.gitignore @@ -50,6 +50,11 @@ yarn-error.log* # env .env* !.env.example +**/.venv/ +**/.ruff_cache/ +sdk/python/src/codex/vendor/** +!sdk/python/src/codex/vendor/ +!sdk/python/src/codex/vendor/README.md # package *.tgz diff --git a/scripts/readme_toc.py b/scripts/readme_toc.py index b6ab0a6582d..56a1412976e 100755 --- a/scripts/readme_toc.py +++ b/scripts/readme_toc.py @@ -76,8 +76,8 @@ def check_or_fix(readme_path: Path, fix: bool) -> int: lines = content.splitlines() # locate ToC markers try: - begin_idx = next(i for i, l in enumerate(lines) if l.strip() == BEGIN_TOC) - end_idx = next(i for i, l in enumerate(lines) if l.strip() == END_TOC) + begin_idx = next(i for i, line in enumerate(lines) if line.strip() == BEGIN_TOC) + end_idx = next(i for i, line in enumerate(lines) if line.strip() == END_TOC) except StopIteration: # No ToC markers found; treat as a no-op so repos without a ToC don't fail CI print( @@ -86,7 +86,7 @@ def check_or_fix(readme_path: Path, fix: bool) -> int: return 0 # extract current ToC list items current_block = lines[begin_idx + 1 : end_idx] - current = [l for l in current_block if l.lstrip().startswith("- [")] + current = [line for line in current_block if line.lstrip().startswith("- [")] # generate expected ToC expected = generate_toc_lines(content) if current == expected: diff --git a/sdk/python/.python-version b/sdk/python/.python-version new file mode 100644 index 00000000000..24ee5b1be99 --- /dev/null +++ b/sdk/python/.python-version @@ -0,0 +1 @@ +3.13 diff --git a/sdk/python/README.md b/sdk/python/README.md new file mode 100644 index 00000000000..cb33a6a525c --- /dev/null +++ b/sdk/python/README.md @@ -0,0 +1,103 @@ +# Codex Python SDK + +Embed the Codex agent in Python workflows. This SDK shells out to the bundled `codex` CLI, streams +structured events, and provides strongly-typed helpers for synchronous and streaming turns. + +## Status + +- Target Python 3.12+. +- API and packaging are pre-alpha; expect breaking changes. +- Binaries are bundled under `codex/vendor` for supported triples. 
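+
+Until the bundling step described below has been run, a source checkout has no vendored binaries;
+in that case you can point the client at a locally built `codex` instead. A minimal sketch,
+assuming a debug build exists at `codex-rs/target/debug/codex` and the snippet runs from the
+repository root:
+
+```python
+from codex import Codex, CodexOptions
+
+client = Codex(CodexOptions(codex_path_override="codex-rs/target/debug/codex"))
+thread = client.start_thread()
+print(thread.run("Say hello").final_response)
+```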
+ +## Quickstart + +```python +from codex import Codex + +client = Codex() +thread = client.start_thread() +turn = thread.run("Summarize the latest CI failure.") + +print(turn.final_response) +for item in turn.items: + print(item) +``` + +## Streaming + +```python +from codex import Codex + +client = Codex() +thread = client.start_thread() + +stream = thread.run_streamed("Implement the fix.") +for event in stream: + print(event) +``` + +## Structured Output + +```python +from codex import Codex, TurnOptions + +schema = { + "type": "object", + "properties": { + "summary": {"type": "string"}, + "status": {"type": "string", "enum": ["ok", "action_required"]}, + }, + "required": ["summary", "status"], + "additionalProperties": False, +} + +thread = Codex().start_thread() +turn = thread.run("Summarize repository status", TurnOptions(output_schema=schema)) +print(turn.final_response) +``` + +### Structured output with Pydantic (optional) + +If you use [Pydantic](https://docs.pydantic.dev/latest/) v2, you can pass a model class or instance directly. The SDK converts it to JSON Schema automatically: + +```python +from pydantic import BaseModel +from codex import Codex, TurnOptions + + +class StatusReport(BaseModel): + summary: str + status: str + + +thread = Codex().start_thread() +turn = thread.run( + "Summarize repository status", + TurnOptions(output_schema=StatusReport), +) +print(turn.final_response) +``` + +## Development + +- Install dependencies with `uv sync --extra dev`. +- Run formatting and linting: `uv run ruff check .` and `uv run ruff format .`. +- Type-check with `uv run mypy --config-file pyproject.toml src/codex`. +- Tests via `uv run pytest`. + +### Bundling native binaries + +The SDK shells out to the Rust `codex` executable. For local testing we point at +`codex-rs/target/debug/codex`, but release builds should bundle the official +artifacts in `src/codex/vendor/` just like the TypeScript SDK. Use the helper +script to fetch prebuilt binaries from the Rust release workflow: + +```bash +uv run python sdk/python/scripts/install_native_deps.py --clean --workflow-url +``` + +Omit `--workflow-url` to use the default pinned run. After bundling, build the +wheel/sdist with `uv build` (or `python -m build`). The `vendor/` directory is +ignored by git aside from its README, so remember to run the script before +cutting a release. 
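+
+`--workflow-url` takes the full URL of the GitHub Actions run that produced the binaries. For
+example (the run id below is a placeholder):
+
+```bash
+uv run python sdk/python/scripts/install_native_deps.py \
+  --clean \
+  --workflow-url "https://github.com/openai/codex/actions/runs/<run-id>"
+```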
+ diff --git a/sdk/python/examples/basic_streaming.py b/sdk/python/examples/basic_streaming.py new file mode 100644 index 00000000000..3c5c15817ce --- /dev/null +++ b/sdk/python/examples/basic_streaming.py @@ -0,0 +1,25 @@ +from __future__ import annotations + +from codex import Codex, ItemCompletedEvent, TurnCompletedEvent + + +def main() -> None: + client = Codex() + thread = client.start_thread() + + stream = thread.run_streamed("Summarize repository health") + for event in stream: + match event: + case ItemCompletedEvent(item=item): + print(f"item[{item.type}]: {item}") + case TurnCompletedEvent(usage=usage): + print( + "usage: input=%s cached=%s output=%s" + % (usage.input_tokens, usage.cached_input_tokens, usage.output_tokens) + ) + case _: + print(event) + + +if __name__ == "__main__": + main() diff --git a/sdk/python/examples/structured_output.py b/sdk/python/examples/structured_output.py new file mode 100644 index 00000000000..9a2b1a32cf3 --- /dev/null +++ b/sdk/python/examples/structured_output.py @@ -0,0 +1,24 @@ +from __future__ import annotations + +from codex import Codex, TurnOptions + + +SCHEMA = { + "type": "object", + "properties": { + "summary": {"type": "string"}, + "status": {"type": "string", "enum": ["ok", "action_required"]}, + }, + "required": ["summary", "status"], + "additionalProperties": False, +} + + +def main() -> None: + thread = Codex().start_thread() + turn = thread.run("Summarize repository status", TurnOptions(output_schema=SCHEMA)) + print(turn.final_response) + + +if __name__ == "__main__": + main() diff --git a/sdk/python/pyproject.toml b/sdk/python/pyproject.toml new file mode 100644 index 00000000000..ec31a20a4b6 --- /dev/null +++ b/sdk/python/pyproject.toml @@ -0,0 +1,49 @@ +[build-system] +requires = ["hatchling"] +build-backend = "hatchling.build" + +[project] +name = "openai-codex-sdk" +version = "0.0.1a0" +description = "Python SDK for Codex APIs." 
+readme = "README.md" +requires-python = ">=3.12" +license = { text = "Apache-2.0" } +authors = [{ name = "OpenAI" }] +keywords = ["codex", "sdk", "agents", "cli"] +classifiers = [ + "Development Status :: 2 - Pre-Alpha", + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.12", + "Typing :: Typed", +] +dependencies = [] + +[project.optional-dependencies] +dev = ["pytest>=8.4", "mypy>=1.18", "ruff>=0.5", "pydantic>=2.7"] + +[project.urls] +Homepage = "https://github.com/openai/codex" +Repository = "https://github.com/openai/codex/tree/main/sdk/python" + +[tool.hatch.metadata] +allow-direct-references = true + +[tool.hatch.build] +packages = ["src/codex"] +include = ["examples", "src/codex/vendor", "src/codex/py.typed"] + +[tool.pytest.ini_options] +pythonpath = ["src"] +filterwarnings = ["error"] + +[tool.ruff] +line-length = 100 +target-version = "py312" + +[tool.mypy] +python_version = "3.12" +files = "src/codex" +strict = true diff --git a/sdk/python/scripts/install_native_deps.py b/sdk/python/scripts/install_native_deps.py new file mode 100755 index 00000000000..90a62c91736 --- /dev/null +++ b/sdk/python/scripts/install_native_deps.py @@ -0,0 +1,79 @@ +#!/usr/bin/env python3 +"""Install Codex native binaries for the Python SDK.""" + +from __future__ import annotations + +import argparse +import shutil +import subprocess +from pathlib import Path + +REPO_ROOT = Path(__file__).resolve().parents[2] +INSTALL_NATIVE_DEPS = REPO_ROOT / "codex-cli" / "scripts" / "install_native_deps.py" +PYTHON_SDK_ROOT = REPO_ROOT / "sdk" / "python" +PACKAGE_ROOT = PYTHON_SDK_ROOT / "src" / "codex" +VENDOR_DIR = PACKAGE_ROOT / "vendor" + + +def parse_args() -> argparse.Namespace: + parser = argparse.ArgumentParser(description=__doc__) + parser.add_argument( + "--workflow-url", + help=( + "GitHub Actions workflow URL containing the prebuilt Codex binaries. " + "If omitted, the default from install_native_deps.py is used." 
+ ), + ) + parser.add_argument( + "--component", + dest="components", + action="append", + default=["codex"], + choices=("codex", "rg", "codex-responses-api-proxy"), + help="Native component(s) to install (default: codex).", + ) + parser.add_argument( + "--clean", + action="store_true", + help="Remove the existing vendor directory before installing binaries.", + ) + return parser.parse_args() + + +def ensure_install_script() -> None: + if not INSTALL_NATIVE_DEPS.exists(): + raise FileNotFoundError(f"install_native_deps.py not found at {INSTALL_NATIVE_DEPS}") + + +def run_install(workflow_url: str | None, components: list[str]) -> None: + cmd = [str(INSTALL_NATIVE_DEPS)] + + if workflow_url: + cmd.extend(["--workflow-url", workflow_url]) + + for component in components: + cmd.extend(["--component", component]) + + cmd.append(str(PACKAGE_ROOT)) + + subprocess.run(cmd, check=True, cwd=REPO_ROOT) + + +def clean_vendor() -> None: + if VENDOR_DIR.exists(): + shutil.rmtree(VENDOR_DIR) + + +def main() -> int: + args = parse_args() + ensure_install_script() + + if args.clean: + clean_vendor() + + run_install(args.workflow_url, args.components) + return 0 + + +if __name__ == "__main__": + raise SystemExit(main()) diff --git a/sdk/python/src/codex/__init__.py b/sdk/python/src/codex/__init__.py new file mode 100644 index 00000000000..b63bcfc8157 --- /dev/null +++ b/sdk/python/src/codex/__init__.py @@ -0,0 +1,83 @@ +from __future__ import annotations + +from .client import Codex +from .config import CodexOptions, SandboxMode, ThreadOptions, TurnOptions, ApprovalMode +from .events import ( + ThreadEvent, + ThreadStartedEvent, + TurnStartedEvent, + TurnCompletedEvent, + TurnFailedEvent, + ItemStartedEvent, + ItemUpdatedEvent, + ItemCompletedEvent, + ThreadErrorEvent, + Usage, +) +from .items import ( + ThreadItem, + AgentMessageItem, + ReasoningItem, + CommandExecutionItem, + CommandExecutionStatus, + FileChangeItem, + PatchApplyStatus, + PatchChangeKind, + McpToolCallItem, + McpToolCallStatus, + WebSearchItem, + TodoListItem, + ErrorItem, +) +from .thread import Thread, ThreadRunResult, ThreadStream +from .exceptions import ( + CodexError, + UnsupportedPlatformError, + SpawnError, + ExecExitError, + JsonParseError, + ThreadRunError, + SchemaValidationError, +) + +__all__ = [ + "Codex", + "CodexOptions", + "ThreadOptions", + "TurnOptions", + "SandboxMode", + "ApprovalMode", + "Thread", + "ThreadRunResult", + "ThreadStream", + "ThreadEvent", + "ThreadStartedEvent", + "TurnStartedEvent", + "TurnCompletedEvent", + "TurnFailedEvent", + "ItemStartedEvent", + "ItemUpdatedEvent", + "ItemCompletedEvent", + "ThreadErrorEvent", + "Usage", + "ThreadItem", + "AgentMessageItem", + "ReasoningItem", + "CommandExecutionItem", + "CommandExecutionStatus", + "FileChangeItem", + "PatchApplyStatus", + "PatchChangeKind", + "McpToolCallItem", + "McpToolCallStatus", + "WebSearchItem", + "TodoListItem", + "ErrorItem", + "CodexError", + "UnsupportedPlatformError", + "SpawnError", + "ExecExitError", + "JsonParseError", + "ThreadRunError", + "SchemaValidationError", +] diff --git a/sdk/python/src/codex/client.py b/sdk/python/src/codex/client.py new file mode 100644 index 00000000000..a8bd18ee842 --- /dev/null +++ b/sdk/python/src/codex/client.py @@ -0,0 +1,22 @@ +from __future__ import annotations + +from typing import Optional + +from .config import CodexOptions, ThreadOptions +from .exec import CodexExec +from .thread import Thread + + +class Codex: + def __init__(self, options: Optional[CodexOptions] = None) -> None: + opts = 
options or CodexOptions() + self._options = opts + self._exec = CodexExec(opts.codex_path_override) + + def start_thread(self, options: Optional[ThreadOptions] = None) -> Thread: + thread_options = options or ThreadOptions() + return Thread(self._exec, self._options, thread_options) + + def resume_thread(self, thread_id: str, options: Optional[ThreadOptions] = None) -> Thread: + thread_options = options or ThreadOptions() + return Thread(self._exec, self._options, thread_options, thread_id) diff --git a/sdk/python/src/codex/config.py b/sdk/python/src/codex/config.py new file mode 100644 index 00000000000..0ab71e3ef32 --- /dev/null +++ b/sdk/python/src/codex/config.py @@ -0,0 +1,44 @@ +from __future__ import annotations + +from dataclasses import dataclass +from enum import StrEnum +from typing import Mapping, Optional, TYPE_CHECKING + +if TYPE_CHECKING: # pragma: no cover - typing only + from pydantic import BaseModel as PydanticBaseModel + SchemaInput = Mapping[str, object] | type[PydanticBaseModel] | PydanticBaseModel +else: + SchemaInput = Mapping[str, object] + + +class ApprovalMode(StrEnum): + NEVER = "never" + ON_REQUEST = "on-request" + ON_FAILURE = "on-failure" + UNTRUSTED = "untrusted" + + +class SandboxMode(StrEnum): + READ_ONLY = "read-only" + WORKSPACE_WRITE = "workspace-write" + DANGER_FULL_ACCESS = "danger-full-access" + + +@dataclass(frozen=True, slots=True) +class CodexOptions: + codex_path_override: Optional[str] = None + base_url: Optional[str] = None + api_key: Optional[str] = None + + +@dataclass(frozen=True, slots=True) +class ThreadOptions: + model: Optional[str] = None + sandbox_mode: Optional[SandboxMode] = None + working_directory: Optional[str] = None + skip_git_repo_check: bool = False + + +@dataclass(frozen=True, slots=True) +class TurnOptions: + output_schema: Optional[SchemaInput] = None diff --git a/sdk/python/src/codex/discovery.py b/sdk/python/src/codex/discovery.py new file mode 100644 index 00000000000..2bb475c1be0 --- /dev/null +++ b/sdk/python/src/codex/discovery.py @@ -0,0 +1,42 @@ +from __future__ import annotations + +import platform +import sys +from pathlib import Path + +from .exceptions import UnsupportedPlatformError + + +def _detect_target() -> str: + system = sys.platform + machine = platform.machine().lower() + + if system in {"linux", "linux2"}: + if machine in {"x86_64", "amd64"}: + return "x86_64-unknown-linux-musl" + if machine in {"aarch64", "arm64"}: + return "aarch64-unknown-linux-musl" + elif system == "darwin": + if machine == "x86_64": + return "x86_64-apple-darwin" + if machine in {"arm64", "aarch64"}: + return "aarch64-apple-darwin" + elif system == "win32": + if machine in {"x86_64", "amd64"}: + return "x86_64-pc-windows-msvc" + if machine in {"arm64", "aarch64"}: + return "aarch64-pc-windows-msvc" + + raise UnsupportedPlatformError(system, machine) + + +def find_codex_binary(override: str | None = None) -> Path: + if override: + return Path(override) + + target = _detect_target() + package_root = Path(__file__).resolve().parent + vendor_root = package_root / "vendor" / target / "codex" + binary_name = "codex.exe" if sys.platform == "win32" else "codex" + binary_path = vendor_root / binary_name + return binary_path diff --git a/sdk/python/src/codex/events.py b/sdk/python/src/codex/events.py new file mode 100644 index 00000000000..1832ddbc7f5 --- /dev/null +++ b/sdk/python/src/codex/events.py @@ -0,0 +1,141 @@ +from __future__ import annotations + +from dataclasses import dataclass, field +from typing import Literal + +from 
.exceptions import CodexError +from .items import ThreadItem, parse_thread_item + + +@dataclass(frozen=True, slots=True) +class Usage: + input_tokens: int + cached_input_tokens: int + output_tokens: int + + +@dataclass(frozen=True, slots=True) +class ThreadError: + message: str + + +@dataclass(frozen=True, slots=True) +class ThreadStartedEvent: + type: Literal["thread.started"] = field(default="thread.started", init=False) + thread_id: str + + +@dataclass(frozen=True, slots=True) +class TurnStartedEvent: + type: Literal["turn.started"] = field(default="turn.started", init=False) + + +@dataclass(frozen=True, slots=True) +class TurnCompletedEvent: + type: Literal["turn.completed"] = field(default="turn.completed", init=False) + usage: Usage + + +@dataclass(frozen=True, slots=True) +class TurnFailedEvent: + type: Literal["turn.failed"] = field(default="turn.failed", init=False) + error: ThreadError + + +@dataclass(frozen=True, slots=True) +class ItemStartedEvent: + type: Literal["item.started"] = field(default="item.started", init=False) + item: ThreadItem + + +@dataclass(frozen=True, slots=True) +class ItemUpdatedEvent: + type: Literal["item.updated"] = field(default="item.updated", init=False) + item: ThreadItem + + +@dataclass(frozen=True, slots=True) +class ItemCompletedEvent: + type: Literal["item.completed"] = field(default="item.completed", init=False) + item: ThreadItem + + +@dataclass(frozen=True, slots=True) +class ThreadErrorEvent: + type: Literal["error"] = field(default="error", init=False) + message: str + + +ThreadEvent = ( + ThreadStartedEvent + | TurnStartedEvent + | TurnCompletedEvent + | TurnFailedEvent + | ItemStartedEvent + | ItemUpdatedEvent + | ItemCompletedEvent + | ThreadErrorEvent +) + + +def _ensure_dict(payload: object) -> dict[str, object]: + if isinstance(payload, dict): + return payload + raise CodexError("Event payload must be an object") + + +def _ensure_str(value: object, field: str) -> str: + if isinstance(value, str): + return value + raise CodexError(f"Expected string for {field}") + + +def _ensure_int(value: object, field: str) -> int: + if isinstance(value, int): + return value + raise CodexError(f"Expected integer for {field}") + + +def _parse_usage(payload: object) -> Usage: + data = _ensure_dict(payload) + return Usage( + input_tokens=_ensure_int(data.get("input_tokens"), "input_tokens"), + cached_input_tokens=_ensure_int(data.get("cached_input_tokens"), "cached_input_tokens"), + output_tokens=_ensure_int(data.get("output_tokens"), "output_tokens"), + ) + + +def parse_thread_event(payload: object) -> ThreadEvent: + data = _ensure_dict(payload) + type_name = _ensure_str(data.get("type"), "type") + + if type_name == "thread.started": + thread_id = _ensure_str(data.get("thread_id"), "thread_id") + return ThreadStartedEvent(thread_id=thread_id) + + if type_name == "turn.started": + return TurnStartedEvent() + + if type_name == "turn.completed": + usage = _parse_usage(data.get("usage")) + return TurnCompletedEvent(usage=usage) + + if type_name == "turn.failed": + error_payload = _ensure_dict(data.get("error")) + message = _ensure_str(error_payload.get("message"), "error.message") + return TurnFailedEvent(error=ThreadError(message=message)) + + if type_name in {"item.started", "item.updated", "item.completed"}: + item_payload = data.get("item") + item = parse_thread_item(item_payload) + if type_name == "item.started": + return ItemStartedEvent(item=item) + if type_name == "item.updated": + return ItemUpdatedEvent(item=item) + return 
ItemCompletedEvent(item=item) + + if type_name == "error": + message = _ensure_str(data.get("message"), "message") + return ThreadErrorEvent(message=message) + + raise CodexError(f"Unsupported event type: {type_name}") diff --git a/sdk/python/src/codex/exceptions.py b/sdk/python/src/codex/exceptions.py new file mode 100644 index 00000000000..843d42868ce --- /dev/null +++ b/sdk/python/src/codex/exceptions.py @@ -0,0 +1,63 @@ +from __future__ import annotations + +from dataclasses import dataclass +from typing import Sequence + + +class CodexError(Exception): + """Base exception for Codex SDK.""" + + +def _format_command(command: Sequence[str] | None) -> str: + if not command: + return "" + return " ".join(command) + + +class UnsupportedPlatformError(CodexError): + def __init__(self, platform: str, machine: str) -> None: + message = f"Unsupported platform: {platform} ({machine})" + super().__init__(message) + self.platform = platform + self.machine = machine + + +class SpawnError(CodexError): + def __init__(self, command: Sequence[str] | None, error: OSError) -> None: + self.command = list(command) if command else None + self.original_error = error + super().__init__(f"Failed to spawn codex exec: {_format_command(self.command)}: {error}") + + +@dataclass(slots=True) +class ExecExitError(CodexError): + command: tuple[str, ...] + exit_code: int + stderr: str + + def __str__(self) -> str: # pragma: no cover - trivial formatting + stderr = self.stderr.strip() + tail = f": {stderr}" if stderr else "" + return f"codex exec exited with code {self.exit_code}{tail}" + + +@dataclass(slots=True) +class JsonParseError(CodexError): + raw_line: str + command: tuple[str, ...] + + def __str__(self) -> str: # pragma: no cover - trivial formatting + sample = self.raw_line + if len(sample) > 200: + sample = sample[:197] + "..." 
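+        # embed the (possibly truncated) raw line so parse failures are easy to diagnose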
+ return f"Failed to parse codex event: {sample}" + + +class ThreadRunError(CodexError): + def __init__(self, message: str) -> None: + super().__init__(message) + + +class SchemaValidationError(CodexError): + def __init__(self, message: str) -> None: + super().__init__(message) diff --git a/sdk/python/src/codex/exec.py b/sdk/python/src/codex/exec.py new file mode 100644 index 00000000000..0a3ca65e7e7 --- /dev/null +++ b/sdk/python/src/codex/exec.py @@ -0,0 +1,132 @@ +from __future__ import annotations + +import io +import os +import subprocess +from dataclasses import dataclass +from threading import Thread +from typing import Iterator, Optional + +from .config import SandboxMode +from .discovery import find_codex_binary +from .exceptions import ExecExitError, SpawnError + +INTERNAL_ORIGINATOR_ENV = "CODEX_INTERNAL_ORIGINATOR_OVERRIDE" +PYTHON_SDK_ORIGINATOR = "codex_sdk_py" + + +@dataclass(frozen=True, slots=True) +class ExecArgs: + input: str + base_url: Optional[str] = None + api_key: Optional[str] = None + thread_id: Optional[str] = None + model: Optional[str] = None + sandbox_mode: Optional[SandboxMode] = None + working_directory: Optional[str] = None + skip_git_repo_check: bool = False + output_schema_path: Optional[str] = None + + +class CodexExec: + def __init__(self, executable_override: Optional[str] = None) -> None: + self._binary = find_codex_binary(executable_override) + + def build_command(self, args: ExecArgs) -> list[str]: + command = [str(self._binary), "exec", "--experimental-json"] + + if args.model: + command.extend(["--model", args.model]) + if args.sandbox_mode: + command.extend(["--sandbox", args.sandbox_mode.value]) + if args.working_directory: + command.extend(["--cd", args.working_directory]) + if args.skip_git_repo_check: + command.append("--skip-git-repo-check") + if args.output_schema_path: + command.extend(["--output-schema", args.output_schema_path]) + if args.thread_id: + command.extend(["resume", args.thread_id]) + + return command + + def run_lines(self, args: ExecArgs) -> Iterator[str]: + command = self.build_command(args) + + env = os.environ.copy() + env.setdefault(INTERNAL_ORIGINATOR_ENV, PYTHON_SDK_ORIGINATOR) + if args.base_url: + env["OPENAI_BASE_URL"] = args.base_url + if args.api_key: + env["CODEX_API_KEY"] = args.api_key + + stderr_buffer: list[str] = [] + + try: + process = subprocess.Popen( + command, + stdin=subprocess.PIPE, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + text=True, + encoding="utf-8", + errors="strict", + env=env, + ) + except OSError as error: # pragma: no cover - exercised indirectly + raise SpawnError(command, error) from error + + if not process.stdin or not process.stdout: + process.kill() + raise SpawnError(command, OSError("Missing stdio pipes")) + + stderr_thread: Thread | None = None + if process.stderr: + def _drain_stderr(pipe: io.TextIOBase, buffer: list[str]) -> None: + while True: + try: + chunk = pipe.readline() + except ValueError: + break + if chunk == "": + break + buffer.append(chunk) + + stderr_thread = Thread( + target=_drain_stderr, + args=(process.stderr, stderr_buffer), + daemon=True, + ) + stderr_thread.start() + + try: + process.stdin.write(args.input) + process.stdin.close() + + for line in iter(process.stdout.readline, ""): + yield line.rstrip("\n") + + return_code = process.wait() + if stderr_thread is not None: + stderr_thread.join() + + stderr_output = "".join(stderr_buffer) + if return_code != 0: + raise ExecExitError(tuple(command), return_code, stderr_output) + finally: + if 
process.stdout and not process.stdout.closed: + process.stdout.close() + if process.stderr and not process.stderr.closed: + try: + process.stderr.close() + except ValueError: + pass + if stderr_thread is not None and stderr_thread.is_alive(): + stderr_thread.join(timeout=0.1) + returncode = process.poll() + if returncode is None: + process.kill() + try: + process.wait(timeout=0.5) + except subprocess.TimeoutExpired: + process.wait() diff --git a/sdk/python/src/codex/items.py b/sdk/python/src/codex/items.py new file mode 100644 index 00000000000..7a7e16e52fd --- /dev/null +++ b/sdk/python/src/codex/items.py @@ -0,0 +1,228 @@ +from __future__ import annotations + +from dataclasses import dataclass, field +from enum import StrEnum +from typing import Iterable, Literal, Sequence, cast + +from .exceptions import CodexError + + +class CommandExecutionStatus(StrEnum): + IN_PROGRESS = "in_progress" + COMPLETED = "completed" + FAILED = "failed" + + +class PatchChangeKind(StrEnum): + ADD = "add" + DELETE = "delete" + UPDATE = "update" + + +class PatchApplyStatus(StrEnum): + COMPLETED = "completed" + FAILED = "failed" + + +class McpToolCallStatus(StrEnum): + IN_PROGRESS = "in_progress" + COMPLETED = "completed" + FAILED = "failed" + + +@dataclass(frozen=True, slots=True) +class CommandExecutionItem: + type: Literal["command_execution"] = field(default="command_execution", init=False) + id: str + command: str + aggregated_output: str + status: CommandExecutionStatus + exit_code: int | None = None + + +@dataclass(frozen=True, slots=True) +class FileUpdateChange: + path: str + kind: PatchChangeKind + + +@dataclass(frozen=True, slots=True) +class FileChangeItem: + type: Literal["file_change"] = field(default="file_change", init=False) + id: str + changes: Sequence[FileUpdateChange] + status: PatchApplyStatus + + +@dataclass(frozen=True, slots=True) +class McpToolCallItem: + type: Literal["mcp_tool_call"] = field(default="mcp_tool_call", init=False) + id: str + server: str + tool: str + status: McpToolCallStatus + + +@dataclass(frozen=True, slots=True) +class AgentMessageItem: + type: Literal["agent_message"] = field(default="agent_message", init=False) + id: str + text: str + + +@dataclass(frozen=True, slots=True) +class ReasoningItem: + type: Literal["reasoning"] = field(default="reasoning", init=False) + id: str + text: str + + +@dataclass(frozen=True, slots=True) +class WebSearchItem: + type: Literal["web_search"] = field(default="web_search", init=False) + id: str + query: str + + +@dataclass(frozen=True, slots=True) +class ErrorItem: + type: Literal["error"] = field(default="error", init=False) + id: str + message: str + + +@dataclass(frozen=True, slots=True) +class TodoItem: + text: str + completed: bool + + +@dataclass(frozen=True, slots=True) +class TodoListItem: + type: Literal["todo_list"] = field(default="todo_list", init=False) + id: str + items: Sequence[TodoItem] + + +ThreadItem = ( + AgentMessageItem + | ReasoningItem + | CommandExecutionItem + | FileChangeItem + | McpToolCallItem + | WebSearchItem + | TodoListItem + | ErrorItem +) + + +def _ensure_str(value: object, field: str) -> str: + if isinstance(value, str): + return value + raise CodexError(f"Expected string for {field}") + + +def _ensure_sequence(value: object, field: str) -> Sequence[object]: + if isinstance(value, Sequence) and not isinstance(value, (str, bytes)): + return cast(Sequence[object], value) + raise CodexError(f"Expected sequence for {field}") + + +def _parse_changes(values: Iterable[object]) -> 
list[FileUpdateChange]: + changes: list[FileUpdateChange] = [] + for value in values: + if not isinstance(value, dict): + raise CodexError("Invalid file change entry") + path = _ensure_str(value.get("path"), "path") + kind = _ensure_str(value.get("kind"), "kind") + try: + enum_kind = PatchChangeKind(kind) + except ValueError as exc: + raise CodexError(f"Unsupported file change kind: {kind}") from exc + changes.append(FileUpdateChange(path=path, kind=enum_kind)) + return changes + + +def _parse_todos(values: Iterable[object]) -> list[TodoItem]: + todos: list[TodoItem] = [] + for value in values: + if not isinstance(value, dict): + raise CodexError("Invalid todo entry") + text = _ensure_str(value.get("text"), "text") + completed = bool(value.get("completed", False)) + todos.append(TodoItem(text=text, completed=completed)) + return todos + + +def parse_thread_item(payload: object) -> ThreadItem: + if not isinstance(payload, dict): + raise CodexError("Thread item must be an object") + + type_name = _ensure_str(payload.get("type"), "type") + item_id = _ensure_str(payload.get("id"), "id") + + if type_name == "agent_message": + text = _ensure_str(payload.get("text"), "text") + return AgentMessageItem(id=item_id, text=text) + + if type_name == "reasoning": + text = _ensure_str(payload.get("text"), "text") + return ReasoningItem(id=item_id, text=text) + + if type_name == "command_execution": + command = _ensure_str(payload.get("command"), "command") + aggregated_output = _ensure_str(payload.get("aggregated_output"), "aggregated_output") + status_str = _ensure_str(payload.get("status"), "status") + try: + status = CommandExecutionStatus(status_str) + except ValueError as exc: + raise CodexError(f"Unsupported command execution status: {status_str}") from exc + exit_code = payload.get("exit_code") + exit_value = int(exit_code) if isinstance(exit_code, int) else None + return CommandExecutionItem( + id=item_id, + command=command, + aggregated_output=aggregated_output, + status=status, + exit_code=exit_value, + ) + + if type_name == "file_change": + changes_raw = _ensure_sequence(payload.get("changes"), "changes") + status_str = _ensure_str(payload.get("status"), "status") + try: + change_status = PatchApplyStatus(status_str) + except ValueError as exc: + raise CodexError(f"Unsupported file change status: {status_str}") from exc + changes = _parse_changes(changes_raw) + return FileChangeItem(id=item_id, changes=changes, status=change_status) + + if type_name == "mcp_tool_call": + server = _ensure_str(payload.get("server"), "server") + tool = _ensure_str(payload.get("tool"), "tool") + status_str = _ensure_str(payload.get("status"), "status") + try: + call_status = McpToolCallStatus(status_str) + except ValueError as exc: + raise CodexError(f"Unsupported MCP tool call status: {status_str}") from exc + return McpToolCallItem( + id=item_id, + server=server, + tool=tool, + status=call_status, + ) + + if type_name == "web_search": + query = _ensure_str(payload.get("query"), "query") + return WebSearchItem(id=item_id, query=query) + + if type_name == "error": + message = _ensure_str(payload.get("message"), "message") + return ErrorItem(id=item_id, message=message) + + if type_name == "todo_list": + todos_raw = _ensure_sequence(payload.get("items"), "items") + todos = _parse_todos(todos_raw) + return TodoListItem(id=item_id, items=todos) + + raise CodexError(f"Unsupported item type: {type_name}") diff --git a/sdk/python/src/codex/py.typed b/sdk/python/src/codex/py.typed new file mode 100644 index 
00000000000..e69de29bb2d diff --git a/sdk/python/src/codex/schema.py b/sdk/python/src/codex/schema.py new file mode 100644 index 00000000000..92f42a40fee --- /dev/null +++ b/sdk/python/src/codex/schema.py @@ -0,0 +1,89 @@ +from __future__ import annotations + +import json +import tempfile +from collections.abc import Mapping +from pathlib import Path +from types import TracebackType +from typing import Any, Type, cast +from functools import lru_cache + +from .exceptions import SchemaValidationError +from .config import SchemaInput + + +@lru_cache(maxsize=1) +def _get_pydantic_base_model() -> Type[Any] | None: # pragma: no cover - import guard + try: + from pydantic import BaseModel + except ImportError: + return None + return cast(Type[Any], BaseModel) + + +def _is_pydantic_model(value: object) -> bool: + base_model = _get_pydantic_base_model() + return isinstance(value, type) and base_model is not None and issubclass(value, base_model) + + +def _is_pydantic_instance(value: object) -> bool: + base_model = _get_pydantic_base_model() + return base_model is not None and isinstance(value, base_model) + + +def _convert_schema_input(schema: SchemaInput | None) -> Mapping[str, object] | None: + if schema is None or isinstance(schema, Mapping): + return schema + + if _is_pydantic_model(schema): + return cast(Mapping[str, object], schema.model_json_schema()) + + if _is_pydantic_instance(schema): + return cast(Mapping[str, object], schema.model_json_schema()) + + raise SchemaValidationError( + "output_schema must be a mapping or a Pydantic BaseModel (class or instance)", + ) + + +class SchemaTempFile: + def __init__(self, schema: SchemaInput | None) -> None: + self._raw_schema = schema + self._temp_dir: tempfile.TemporaryDirectory[str] | None = None + self.path: Path | None = None + + def __enter__(self) -> SchemaTempFile: + schema = _convert_schema_input(self._raw_schema) + if schema is None: + return self + + for key in schema.keys(): + if not isinstance(key, str): + raise SchemaValidationError("output_schema keys must be strings") + + self._temp_dir = tempfile.TemporaryDirectory(prefix="codex-output-schema-") + schema_dir = Path(self._temp_dir.name) + schema_path = schema_dir / "schema.json" + + with schema_path.open("w", encoding="utf-8") as handle: + json.dump(schema, handle, ensure_ascii=False) + self.path = schema_path + return self + + def __exit__( + self, + exc_type: type[BaseException] | None, + exc: BaseException | None, + tb: TracebackType | None, + ) -> None: + self.cleanup() + + def cleanup(self) -> None: + if self._temp_dir is not None: + self._temp_dir.cleanup() + self._temp_dir = None + self.path = None + + +def prepare_schema_file(schema: SchemaInput | None) -> SchemaTempFile: + return SchemaTempFile(schema) diff --git a/sdk/python/src/codex/thread.py b/sdk/python/src/codex/thread.py new file mode 100644 index 00000000000..67c65640926 --- /dev/null +++ b/sdk/python/src/codex/thread.py @@ -0,0 +1,113 @@ +from __future__ import annotations + +import json +from dataclasses import dataclass +from typing import Iterator, Optional + +from .config import CodexOptions, ThreadOptions, TurnOptions +from .events import ( + ItemCompletedEvent, + ThreadErrorEvent, + ThreadEvent, + ThreadStartedEvent, + TurnCompletedEvent, + TurnFailedEvent, + Usage, + parse_thread_event, +) +from .exceptions import JsonParseError, ThreadRunError +from .exec import CodexExec, ExecArgs +from .items import AgentMessageItem, ThreadItem +from .schema import prepare_schema_file + + +@dataclass(frozen=True, 
slots=True) +class ThreadRunResult: + items: list[ThreadItem] + final_response: str + usage: Optional[Usage] + + +@dataclass(frozen=True, slots=True) +class ThreadStream: + events: Iterator[ThreadEvent] + + def __iter__(self) -> Iterator[ThreadEvent]: + return self.events + + +class Thread: + def __init__( + self, + exec_client: CodexExec, + codex_options: CodexOptions, + thread_options: ThreadOptions, + thread_id: Optional[str] = None, + ) -> None: + self._exec = exec_client + self._codex_options = codex_options + self._thread_options = thread_options + self._id = thread_id + + @property + def id(self) -> Optional[str]: + return self._id + + def run_streamed(self, prompt: str, turn_options: Optional[TurnOptions] = None) -> ThreadStream: + events = self._stream_events(prompt, turn_options) + return ThreadStream(events=events) + + def run(self, prompt: str, turn_options: Optional[TurnOptions] = None) -> ThreadRunResult: + final_response = "" + items: list[ThreadItem] = [] + usage: Optional[Usage] = None + failure_message: Optional[str] = None + + for event in self._stream_events(prompt, turn_options): + if isinstance(event, ThreadErrorEvent): + raise ThreadRunError(event.message) + if isinstance(event, TurnFailedEvent): + failure_message = event.error.message + break + if isinstance(event, TurnCompletedEvent): + usage = event.usage + if isinstance(event, ItemCompletedEvent): + item = event.item + items.append(item) + if isinstance(item, AgentMessageItem): + final_response = item.text + + if failure_message is not None: + raise ThreadRunError(failure_message) + + return ThreadRunResult(items=items, final_response=final_response, usage=usage) + + def _stream_events( + self, + prompt: str, + turn_options: Optional[TurnOptions], + ) -> Iterator[ThreadEvent]: + turn = turn_options or TurnOptions() + with prepare_schema_file(turn.output_schema) as schema_file: + exec_args = ExecArgs( + input=prompt, + base_url=self._codex_options.base_url, + api_key=self._codex_options.api_key, + thread_id=self._id, + model=self._thread_options.model, + sandbox_mode=self._thread_options.sandbox_mode, + working_directory=self._thread_options.working_directory, + skip_git_repo_check=self._thread_options.skip_git_repo_check, + output_schema_path=str(schema_file.path) if schema_file.path else None, + ) + command = tuple(self._exec.build_command(exec_args)) + for line in self._exec.run_lines(exec_args): + try: + payload = json.loads(line) + except json.JSONDecodeError as error: + raise JsonParseError(line, command) from error + + event = parse_thread_event(payload) + if isinstance(event, ThreadStartedEvent): + self._id = event.thread_id + yield event diff --git a/sdk/python/src/codex/vendor/README.md b/sdk/python/src/codex/vendor/README.md new file mode 100644 index 00000000000..b4e199535b3 --- /dev/null +++ b/sdk/python/src/codex/vendor/README.md @@ -0,0 +1,2 @@ +Bundled Codex CLI binaries are placed in this directory under platform triples such as +`x86_64-apple-darwin/codex/codex`. The initial Python SDK scaffolding does not include binaries. 
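+
+A populated directory is expected to look roughly like the sketch below (one triple per supported
+platform, with `codex.exe` on Windows):
+
+```
+vendor/
+  x86_64-apple-darwin/codex/codex
+  aarch64-apple-darwin/codex/codex
+  x86_64-unknown-linux-musl/codex/codex
+  x86_64-pc-windows-msvc/codex/codex.exe
+```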
diff --git a/sdk/python/tests/__init__.py b/sdk/python/tests/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/sdk/python/tests/conftest.py b/sdk/python/tests/conftest.py new file mode 100644 index 00000000000..d70c17e7be6 --- /dev/null +++ b/sdk/python/tests/conftest.py @@ -0,0 +1,53 @@ +from __future__ import annotations + +import os +import subprocess +from pathlib import Path +from typing import Any, Callable, Dict, List + +import pytest + +from codex import Codex, CodexOptions +from codex.exec import INTERNAL_ORIGINATOR_ENV + +PROJECT_ROOT = Path(__file__).resolve().parents[3] +BINARY_NAME = "codex.exe" if os.name == "nt" else "codex" +CODEX_BIN = PROJECT_ROOT / "codex-rs" / "target" / "debug" / BINARY_NAME + + +@pytest.fixture(autouse=True) +def _reset_originator_env() -> None: + os.environ.pop(INTERNAL_ORIGINATOR_ENV, None) + + +@pytest.fixture(scope="session") +def codex_binary() -> Path: + if not CODEX_BIN.exists(): + pytest.skip("codex binary not built at target/debug/codex") + return CODEX_BIN + + +@pytest.fixture +def codex_client(codex_binary: Path) -> Callable[[str], Codex]: + def _make(base_url: str) -> Codex: + options = CodexOptions( + codex_path_override=str(codex_binary), + base_url=base_url, + api_key="test", + ) + return Codex(options) + + return _make + + +@pytest.fixture +def codex_exec_spy(monkeypatch: pytest.MonkeyPatch) -> List[Dict[str, Any]]: + calls: List[Dict[str, Any]] = [] + original_popen = subprocess.Popen + + def spy_popen(command, *args, **kwargs): # type: ignore[no-untyped-def] + calls.append({"command": command, "args": args, "kwargs": kwargs}) + return original_popen(command, *args, **kwargs) + + monkeypatch.setattr(subprocess, "Popen", spy_popen) + return calls diff --git a/sdk/python/tests/helpers.py b/sdk/python/tests/helpers.py new file mode 100644 index 00000000000..16f005f1e2a --- /dev/null +++ b/sdk/python/tests/helpers.py @@ -0,0 +1,165 @@ +from __future__ import annotations + +import http.server +import json +import threading +from dataclasses import dataclass, field +from typing import Any, Dict, List + + +DEFAULT_RESPONSE_ID = "resp_mock" +DEFAULT_MESSAGE_ID = "msg_mock" + + +@dataclass +class SseEvent: + type: str + payload: Dict[str, Any] = field(default_factory=dict) + + def format(self) -> str: + data = {"type": self.type, **self.payload} + return f"event: {self.type}\n" f"data: {json.dumps(data)}\n\n" + + +@dataclass +class SseBody: + events: List[SseEvent] + + +@dataclass +class ResponsesProxy: + url: str + close: Any + requests: List[Dict[str, Any]] + + +class _ProxyHandler(http.server.BaseHTTPRequestHandler): + server_version = "ResponsesProxy/1.0" + + def do_POST(self) -> None: # noqa: N802 # pragma: no cover - parsed indirectly + if self.path not in {"/responses", "/v1/responses"}: + self.server.other_requests.append( # type: ignore[attr-defined] + { # pragma: no cover - diagnostics + "method": self.command, + "path": self.path, + "headers": {k.lower(): v for k, v in self.headers.items()}, + } + ) + self.send_error(404) + return + + content_length = int(self.headers.get("content-length", "0")) + body = self.rfile.read(content_length) + json_body = json.loads(body) + headers = {key.lower(): value for key, value in self.headers.items()} + self.server.requests.append( # type: ignore[attr-defined] + { + "body": body.decode("utf-8"), + "json": json_body, + "headers": headers, + "path": self.path, + } + ) + + status_code = self.server.status_code # type: ignore[attr-defined] + self.send_response(status_code) + 
self.send_header("content-type", "text/event-stream") + self.end_headers() + + bodies: List[SseBody] = self.server.response_bodies # type: ignore[attr-defined] + index = self.server.response_index # type: ignore[attr-defined] + body_index = min(index, len(bodies) - 1) + self.server.response_index += 1 + + for event in bodies[body_index].events: + self.wfile.write(event.format().encode("utf-8")) + self.wfile.flush() + + def log_message(self, format: str, *args: Any) -> None: # pragma: no cover - quiet server + return + + +def _run_server(server: http.server.HTTPServer) -> None: + with server: # type: ignore[arg-type] + server.serve_forever(poll_interval=0.1) + + +def start_responses_proxy(bodies: List[SseBody], status_code: int = 200) -> ResponsesProxy: + requests: List[Dict[str, Any]] = [] + server = http.server.ThreadingHTTPServer(("127.0.0.1", 0), _ProxyHandler) + server.requests = requests # type: ignore[attr-defined] + server.other_requests = [] # type: ignore[attr-defined] + server.response_bodies = bodies # type: ignore[attr-defined] + server.response_index = 0 # type: ignore[attr-defined] + server.status_code = status_code # type: ignore[attr-defined] + + thread = threading.Thread(target=_run_server, args=(server,), daemon=True) + thread.start() + + host, port = server.server_address + url = f"http://{host}:{port}" + + def close() -> None: + server.shutdown() + thread.join() + + return ResponsesProxy(url=url, close=close, requests=requests) + + +def response_started(response_id: str = DEFAULT_RESPONSE_ID) -> SseEvent: + return SseEvent( + type="response.created", + payload={ + "response": { + "id": response_id, + } + }, + ) + + +def assistant_message(text: str, item_id: str = DEFAULT_MESSAGE_ID) -> SseEvent: + return SseEvent( + type="response.output_item.done", + payload={ + "item": { + "type": "message", + "role": "assistant", + "id": item_id, + "content": [ + { + "type": "output_text", + "text": text, + } + ], + } + }, + ) + + +def response_completed(response_id: str = DEFAULT_RESPONSE_ID) -> SseEvent: + return SseEvent( + type="response.completed", + payload={ + "response": { + "id": response_id, + "usage": { + "input_tokens": 42, + "input_tokens_details": {"cached_tokens": 12}, + "output_tokens": 5, + "output_tokens_details": None, + "total_tokens": 47, + }, + } + }, + ) + + +def response_failed(message: str) -> SseEvent: + return SseEvent( + type="error", + payload={"error": {"code": "rate_limit_exceeded", "message": message}}, + ) + + +def sse(*events: SseEvent) -> SseBody: + return SseBody(events=list(events)) diff --git a/sdk/python/tests/test_run.py b/sdk/python/tests/test_run.py new file mode 100644 index 00000000000..28fd3b94c5a --- /dev/null +++ b/sdk/python/tests/test_run.py @@ -0,0 +1,238 @@ +from __future__ import annotations + +import pytest +from pathlib import Path + +from pydantic import BaseModel + +from codex import ThreadOptions, TurnOptions +from codex.config import SandboxMode +from codex.exec import PYTHON_SDK_ORIGINATOR +from codex.thread import ThreadRunResult +from codex.exceptions import ThreadRunError + +from .helpers import ( + assistant_message, + response_completed, + response_failed, + response_started, + sse, + start_responses_proxy, +) + +def expect_pair(command: list[str], flag: str, expected: str) -> None: + index = command.index(flag) + assert command[index + 1] == expected + + +def test_returns_thread_events(codex_client) -> None: + proxy = start_responses_proxy([sse(response_started(), assistant_message("Hi!"), response_completed())]) + try: 
+ client = codex_client(proxy.url) + thread = client.start_thread() + turn = thread.run("Hello, world!") + + assert isinstance(turn, ThreadRunResult) + assert thread.id is not None + assert turn.final_response == "Hi!" + assert turn.usage is not None + assert turn.usage.input_tokens == 42 + assert turn.items[0].type == "agent_message" + finally: + proxy.close() + + +def test_sends_previous_items_on_second_run(codex_client) -> None: + proxy = start_responses_proxy( + [ + sse(response_started("response_1"), assistant_message("First response", "item_1"), response_completed("response_1")), + sse(response_started("response_2"), assistant_message("Second response", "item_2"), response_completed("response_2")), + ] + ) + try: + client = codex_client(proxy.url) + thread = client.start_thread() + thread.run("first input") + thread.run("second input") + + assert len(proxy.requests) >= 2 + second_request = proxy.requests[1] + assistant_entry = next(entry for entry in second_request["json"]["input"] if entry["role"] == "assistant") + assistant_text = next(content for content in assistant_entry["content"] if content["type"] == "output_text") + assert assistant_text["text"] == "First response" + finally: + proxy.close() + + +def test_continues_thread_when_run_called_twice_with_options(codex_client) -> None: + proxy = start_responses_proxy( + [ + sse(response_started("response_1"), assistant_message("First response", "item_1"), response_completed("response_1")), + sse(response_started("response_2"), assistant_message("Second response", "item_2"), response_completed("response_2")), + ] + ) + try: + client = codex_client(proxy.url) + thread = client.start_thread() + thread.run("first input") + thread.run("second input") + + second_request = proxy.requests[1] + payload = second_request["json"] + user_entry = payload["input"][-1] + assert user_entry["role"] == "user" + assert user_entry["content"][0]["text"] == "second input" + assistant_entry = next(entry for entry in payload["input"] if entry["role"] == "assistant") + assistant_text = next(content for content in assistant_entry["content"] if content["type"] == "output_text") + assert assistant_text["text"] == "First response" + finally: + proxy.close() + + +def test_resumes_thread_by_id(codex_client) -> None: + proxy = start_responses_proxy( + [ + sse(response_started("response_1"), assistant_message("First response", "item_1"), response_completed("response_1")), + sse(response_started("response_2"), assistant_message("Second response", "item_2"), response_completed("response_2")), + ] + ) + try: + client = codex_client(proxy.url) + original_thread = client.start_thread() + original_thread.run("first input") + + assert original_thread.id is not None + resumed_thread = client.resume_thread(original_thread.id) + result = resumed_thread.run("second input") + + assert resumed_thread.id == original_thread.id + assert result.final_response == "Second response" + + second_request = proxy.requests[1] + assistant_entry = next(entry for entry in second_request["json"]["input"] if entry["role"] == "assistant") + assistant_text = next(content for content in assistant_entry["content"] if content["type"] == "output_text") + assert assistant_text["text"] == "First response" + finally: + proxy.close() + + +def test_thread_options_are_forwarded(codex_client, codex_exec_spy) -> None: + proxy = start_responses_proxy([sse(response_started(), assistant_message("Options applied"), response_completed())]) + try: + client = codex_client(proxy.url) + thread = 
client.start_thread(ThreadOptions(model="gpt-test-1", sandbox_mode=SandboxMode.WORKSPACE_WRITE)) + thread.run("apply options") + + payload = proxy.requests[0]["json"] + assert payload.get("model") == "gpt-test-1" + + command = codex_exec_spy[0]["command"] + expect_pair(command, "--sandbox", "workspace-write") + expect_pair(command, "--model", "gpt-test-1") + finally: + proxy.close() + + +def test_structured_output_writes_temp_file(codex_client, codex_exec_spy) -> None: + proxy = start_responses_proxy([sse(response_started(), assistant_message("Structured"), response_completed())]) + schema = { + "type": "object", + "properties": {"answer": {"type": "string"}}, + "required": ["answer"], + "additionalProperties": False, + } + try: + client = codex_client(proxy.url) + thread = client.start_thread() + thread.run("structured", TurnOptions(output_schema=schema)) + + payload = proxy.requests[0]["json"] + assert payload["text"]["format"]["schema"] == schema + + command = codex_exec_spy[0]["command"] + schema_flag_index = command.index("--output-schema") + schema_path = Path(command[schema_flag_index + 1]) + assert not schema_path.exists() + finally: + proxy.close() + + +def test_structured_output_accepts_pydantic_model(codex_client, codex_exec_spy) -> None: + proxy = start_responses_proxy([sse(response_started(), assistant_message("Structured"), response_completed())]) + + class ResponseModel(BaseModel): + answer: str + + try: + client = codex_client(proxy.url) + thread = client.start_thread() + thread.run("structured", TurnOptions(output_schema=ResponseModel)) + + payload = proxy.requests[0]["json"] + schema = payload["text"]["format"]["schema"] + assert schema["type"] == "object" + assert schema["properties"]["answer"]["type"] == "string" + + command = codex_exec_spy[0]["command"] + schema_flag_index = command.index("--output-schema") + schema_path = Path(command[schema_flag_index + 1]) + assert not schema_path.exists() + finally: + proxy.close() + + +def test_sets_originator_header(codex_client) -> None: + proxy = start_responses_proxy([sse(response_started(), assistant_message("Hi!"), response_completed())]) + try: + client = codex_client(proxy.url) + thread = client.start_thread() + thread.run("Hello") + + headers = proxy.requests[0]["headers"] + assert headers.get("originator") == PYTHON_SDK_ORIGINATOR + finally: + proxy.close() + + +def test_thread_run_error_on_failure(codex_client) -> None: + proxy = start_responses_proxy([ + sse(response_started("resp_1")), + sse(response_failed("rate limit exceeded")), + ]) + try: + client = codex_client(proxy.url) + thread = client.start_thread() + with pytest.raises(ThreadRunError): + thread.run("fail") + finally: + proxy.close() + + +def test_runs_in_provided_working_directory(codex_client, codex_exec_spy, tmp_path) -> None: + proxy = start_responses_proxy([sse(response_started(), assistant_message("Working dir applied", "item_1"), response_completed())]) + working_dir = tmp_path / "codex-working-dir" + working_dir.mkdir() + try: + client = codex_client(proxy.url) + thread = client.start_thread(ThreadOptions(working_directory=str(working_dir), skip_git_repo_check=True)) + thread.run("use custom working directory") + + command = codex_exec_spy[0]["command"] + expect_pair(command, "--cd", str(working_dir)) + assert "--skip-git-repo-check" in command + finally: + proxy.close() + + +def test_requires_git_directory_unless_skipped(codex_client, tmp_path) -> None: + proxy = start_responses_proxy([sse(response_started(), assistant_message("Working dir 
applied", "item_1"), response_completed())])
+    working_dir = tmp_path / "codex-working-dir"
+    working_dir.mkdir()
+    try:
+        client = codex_client(proxy.url)
+        thread = client.start_thread(ThreadOptions(working_directory=str(working_dir)))
+        with pytest.raises(Exception) as exc_info:
+            thread.run("use custom working directory")
+        assert "Not inside a trusted directory" in str(exc_info.value)
+    finally:
+        proxy.close()
diff --git a/sdk/python/tests/test_run_streamed.py b/sdk/python/tests/test_run_streamed.py
new file mode 100644
index 00000000000..2d6b6c7c7cc
--- /dev/null
+++ b/sdk/python/tests/test_run_streamed.py
@@ -0,0 +1,112 @@
+from __future__ import annotations
+
+from codex import TurnOptions
+from codex.events import (
+    ItemCompletedEvent,
+    ThreadStartedEvent,
+    TurnCompletedEvent,
+    TurnStartedEvent,
+)
+
+from .helpers import (
+    assistant_message,
+    response_completed,
+    response_started,
+    sse,
+    start_responses_proxy,
+)
+
+
+def collect_events(stream) -> list:
+    events = list(stream)
+    return events
+
+
+def test_returns_thread_events(codex_client) -> None:
+    proxy = start_responses_proxy([sse(response_started(), assistant_message("Hi!"), response_completed())])
+    try:
+        client = codex_client(proxy.url)
+        thread = client.start_thread()
+        stream = thread.run_streamed("Hello, world!")
+        events = collect_events(stream)
+
+        assert len(events) == 4
+        assert isinstance(events[0], ThreadStartedEvent)
+        assert thread.id is not None
+        assert isinstance(events[1], TurnStartedEvent)
+        item_event = events[2]
+        assert isinstance(item_event, ItemCompletedEvent)
+        assert item_event.item.type == "agent_message"
+        assert item_event.item.text == "Hi!"
+        completed = events[3]
+        assert isinstance(completed, TurnCompletedEvent)
+        assert completed.usage.cached_input_tokens == 12
+    finally:
+        proxy.close()
+
+
+def test_sends_previous_items_on_streamed_run(codex_client) -> None:
+    proxy = start_responses_proxy(
+        [
+            sse(response_started("response_1"), assistant_message("First response", "item_1"), response_completed("response_1")),
+            sse(response_started("response_2"), assistant_message("Second response", "item_2"), response_completed("response_2")),
+        ]
+    )
+    try:
+        client = codex_client(proxy.url)
+        thread = client.start_thread()
+        first = thread.run_streamed("first input")
+        collect_events(first)
+
+        second = thread.run_streamed("second input")
+        collect_events(second)
+
+        second_request = proxy.requests[1]
+        assistant_entry = next(entry for entry in second_request["json"]["input"] if entry.get("role") == "assistant")
+        assistant_text = next(content for content in assistant_entry["content"] if content["type"] == "output_text")
+        assert assistant_text["text"] == "First response"
+    finally:
+        proxy.close()
+
+
+def test_resumes_thread_by_id_when_streaming(codex_client) -> None:
+    proxy = start_responses_proxy(
+        [
+            sse(response_started("response_1"), assistant_message("First response", "item_1"), response_completed("response_1")),
+            sse(response_started("response_2"), assistant_message("Second response", "item_2"), response_completed("response_2")),
+        ]
+    )
+    try:
+        client = codex_client(proxy.url)
+        original_thread = client.start_thread()
+        collect_events(original_thread.run_streamed("first input"))
+
+        assert original_thread.id is not None
+        resumed_thread = client.resume_thread(original_thread.id)
+        collect_events(resumed_thread.run_streamed("second input"))
+
+        second_request = proxy.requests[1]
+        assistant_entry = next(entry for entry in second_request["json"]["input"] if entry.get("role") == "assistant")
+        assistant_text = next(content for content in assistant_entry["content"] if content["type"] == "output_text")
+        assert assistant_text["text"] == "First response"
+    finally:
+        proxy.close()
+
+
+def test_applies_output_schema_when_streaming(codex_client) -> None:
+    proxy = start_responses_proxy([sse(response_started(), assistant_message("Structured"), response_completed())])
+    schema = {
+        "type": "object",
+        "properties": {"answer": {"type": "string"}},
+        "required": ["answer"],
+        "additionalProperties": False,
+    }
+    try:
+        client = codex_client(proxy.url)
+        thread = client.start_thread()
+        collect_events(thread.run_streamed("structured", TurnOptions(output_schema=schema)))
+
+        payload = proxy.requests[0]["json"]
+        assert payload["text"]["format"]["schema"] == schema
+    finally:
+        proxy.close()
diff --git a/sdk/python/uv.lock b/sdk/python/uv.lock
new file mode 100644
index 00000000000..8245df7b4a1
--- /dev/null
+++ b/sdk/python/uv.lock
@@ -0,0 +1,270 @@
+version = 1
+revision = 3
+requires-python = ">=3.12"
+
+[[package]]
+name = "annotated-types"
+version = "0.7.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/ee/67/531ea369ba64dcff5ec9c3402f9f51bf748cec26dde048a2f973a4eea7f5/annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89", size = 16081, upload-time = "2024-05-20T21:33:25.928Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53", size = 13643, upload-time = "2024-05-20T21:33:24.1Z" },
+]
+
+[[package]]
+name = "colorama"
+version = "0.4.6"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697, upload-time = "2022-10-25T02:36:22.414Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335, upload-time = "2022-10-25T02:36:20.889Z" },
+]
+
+[[package]]
+name = "iniconfig"
+version = "2.1.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/f2/97/ebf4da567aa6827c909642694d71c9fcf53e5b504f2d96afea02718862f3/iniconfig-2.1.0.tar.gz", hash = "sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7", size = 4793, upload-time = "2025-03-19T20:09:59.721Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/2c/e1/e6716421ea10d38022b952c159d5161ca1193197fb744506875fbb87ea7b/iniconfig-2.1.0-py3-none-any.whl", hash = "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760", size = 6050, upload-time = "2025-03-19T20:10:01.071Z" },
+]
+
+[[package]]
+name = "mypy"
+version = "1.18.2"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "mypy-extensions" },
+    { name = "pathspec" },
+    { name = "typing-extensions" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/c0/77/8f0d0001ffad290cef2f7f216f96c814866248a0b92a722365ed54648e7e/mypy-1.18.2.tar.gz", hash = 
"sha256:06a398102a5f203d7477b2923dda3634c36727fa5c237d8f859ef90c42a9924b", size = 3448846, upload-time = "2025-09-19T00:11:10.519Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/07/06/dfdd2bc60c66611dd8335f463818514733bc763e4760dee289dcc33df709/mypy-1.18.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:33eca32dd124b29400c31d7cf784e795b050ace0e1f91b8dc035672725617e34", size = 12908273, upload-time = "2025-09-19T00:10:58.321Z" }, + { url = "https://files.pythonhosted.org/packages/81/14/6a9de6d13a122d5608e1a04130724caf9170333ac5a924e10f670687d3eb/mypy-1.18.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a3c47adf30d65e89b2dcd2fa32f3aeb5e94ca970d2c15fcb25e297871c8e4764", size = 11920910, upload-time = "2025-09-19T00:10:20.043Z" }, + { url = "https://files.pythonhosted.org/packages/5f/a9/b29de53e42f18e8cc547e38daa9dfa132ffdc64f7250e353f5c8cdd44bee/mypy-1.18.2-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5d6c838e831a062f5f29d11c9057c6009f60cb294fea33a98422688181fe2893", size = 12465585, upload-time = "2025-09-19T00:10:33.005Z" }, + { url = "https://files.pythonhosted.org/packages/77/ae/6c3d2c7c61ff21f2bee938c917616c92ebf852f015fb55917fd6e2811db2/mypy-1.18.2-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:01199871b6110a2ce984bde85acd481232d17413868c9807e95c1b0739a58914", size = 13348562, upload-time = "2025-09-19T00:10:11.51Z" }, + { url = "https://files.pythonhosted.org/packages/4d/31/aec68ab3b4aebdf8f36d191b0685d99faa899ab990753ca0fee60fb99511/mypy-1.18.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a2afc0fa0b0e91b4599ddfe0f91e2c26c2b5a5ab263737e998d6817874c5f7c8", size = 13533296, upload-time = "2025-09-19T00:10:06.568Z" }, + { url = "https://files.pythonhosted.org/packages/9f/83/abcb3ad9478fca3ebeb6a5358bb0b22c95ea42b43b7789c7fb1297ca44f4/mypy-1.18.2-cp312-cp312-win_amd64.whl", hash = "sha256:d8068d0afe682c7c4897c0f7ce84ea77f6de953262b12d07038f4d296d547074", size = 9828828, upload-time = "2025-09-19T00:10:28.203Z" }, + { url = "https://files.pythonhosted.org/packages/5f/04/7f462e6fbba87a72bc8097b93f6842499c428a6ff0c81dd46948d175afe8/mypy-1.18.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:07b8b0f580ca6d289e69209ec9d3911b4a26e5abfde32228a288eb79df129fcc", size = 12898728, upload-time = "2025-09-19T00:10:01.33Z" }, + { url = "https://files.pythonhosted.org/packages/99/5b/61ed4efb64f1871b41fd0b82d29a64640f3516078f6c7905b68ab1ad8b13/mypy-1.18.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:ed4482847168439651d3feee5833ccedbf6657e964572706a2adb1f7fa4dfe2e", size = 11910758, upload-time = "2025-09-19T00:10:42.607Z" }, + { url = "https://files.pythonhosted.org/packages/3c/46/d297d4b683cc89a6e4108c4250a6a6b717f5fa96e1a30a7944a6da44da35/mypy-1.18.2-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c3ad2afadd1e9fea5cf99a45a822346971ede8685cc581ed9cd4d42eaf940986", size = 12475342, upload-time = "2025-09-19T00:11:00.371Z" }, + { url = "https://files.pythonhosted.org/packages/83/45/4798f4d00df13eae3bfdf726c9244bcb495ab5bd588c0eed93a2f2dd67f3/mypy-1.18.2-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a431a6f1ef14cf8c144c6b14793a23ec4eae3db28277c358136e79d7d062f62d", size = 13338709, upload-time = "2025-09-19T00:11:03.358Z" }, + { url = 
"https://files.pythonhosted.org/packages/d7/09/479f7358d9625172521a87a9271ddd2441e1dab16a09708f056e97007207/mypy-1.18.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:7ab28cc197f1dd77a67e1c6f35cd1f8e8b73ed2217e4fc005f9e6a504e46e7ba", size = 13529806, upload-time = "2025-09-19T00:10:26.073Z" }, + { url = "https://files.pythonhosted.org/packages/71/cf/ac0f2c7e9d0ea3c75cd99dff7aec1c9df4a1376537cb90e4c882267ee7e9/mypy-1.18.2-cp313-cp313-win_amd64.whl", hash = "sha256:0e2785a84b34a72ba55fb5daf079a1003a34c05b22238da94fcae2bbe46f3544", size = 9833262, upload-time = "2025-09-19T00:10:40.035Z" }, + { url = "https://files.pythonhosted.org/packages/5a/0c/7d5300883da16f0063ae53996358758b2a2df2a09c72a5061fa79a1f5006/mypy-1.18.2-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:62f0e1e988ad41c2a110edde6c398383a889d95b36b3e60bcf155f5164c4fdce", size = 12893775, upload-time = "2025-09-19T00:10:03.814Z" }, + { url = "https://files.pythonhosted.org/packages/50/df/2cffbf25737bdb236f60c973edf62e3e7b4ee1c25b6878629e88e2cde967/mypy-1.18.2-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:8795a039bab805ff0c1dfdb8cd3344642c2b99b8e439d057aba30850b8d3423d", size = 11936852, upload-time = "2025-09-19T00:10:51.631Z" }, + { url = "https://files.pythonhosted.org/packages/be/50/34059de13dd269227fb4a03be1faee6e2a4b04a2051c82ac0a0b5a773c9a/mypy-1.18.2-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6ca1e64b24a700ab5ce10133f7ccd956a04715463d30498e64ea8715236f9c9c", size = 12480242, upload-time = "2025-09-19T00:11:07.955Z" }, + { url = "https://files.pythonhosted.org/packages/5b/11/040983fad5132d85914c874a2836252bbc57832065548885b5bb5b0d4359/mypy-1.18.2-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d924eef3795cc89fecf6bedc6ed32b33ac13e8321344f6ddbf8ee89f706c05cb", size = 13326683, upload-time = "2025-09-19T00:09:55.572Z" }, + { url = "https://files.pythonhosted.org/packages/e9/ba/89b2901dd77414dd7a8c8729985832a5735053be15b744c18e4586e506ef/mypy-1.18.2-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:20c02215a080e3a2be3aa50506c67242df1c151eaba0dcbc1e4e557922a26075", size = 13514749, upload-time = "2025-09-19T00:10:44.827Z" }, + { url = "https://files.pythonhosted.org/packages/25/bc/cc98767cffd6b2928ba680f3e5bc969c4152bf7c2d83f92f5a504b92b0eb/mypy-1.18.2-cp314-cp314-win_amd64.whl", hash = "sha256:749b5f83198f1ca64345603118a6f01a4e99ad4bf9d103ddc5a3200cc4614adf", size = 9982959, upload-time = "2025-09-19T00:10:37.344Z" }, + { url = "https://files.pythonhosted.org/packages/87/e3/be76d87158ebafa0309946c4a73831974d4d6ab4f4ef40c3b53a385a66fd/mypy-1.18.2-py3-none-any.whl", hash = "sha256:22a1748707dd62b58d2ae53562ffc4d7f8bcc727e8ac7cbc69c053ddc874d47e", size = 2352367, upload-time = "2025-09-19T00:10:15.489Z" }, +] + +[[package]] +name = "mypy-extensions" +version = "1.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a2/6e/371856a3fb9d31ca8dac321cda606860fa4548858c0cc45d9d1d4ca2628b/mypy_extensions-1.1.0.tar.gz", hash = "sha256:52e68efc3284861e772bbcd66823fde5ae21fd2fdb51c62a211403730b916558", size = 6343, upload-time = "2025-04-22T14:54:24.164Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/79/7b/2c79738432f5c924bef5071f933bcc9efd0473bac3b4aa584a6f7c1c8df8/mypy_extensions-1.1.0-py3-none-any.whl", hash = "sha256:1be4cccdb0f2482337c4743e60421de3a356cd97508abadd57d47403e94f5505", size = 4963, upload-time = "2025-04-22T14:54:22.983Z" 
}, +] + +[[package]] +name = "openai-codex-sdk" +version = "0.0.1a0" +source = { editable = "." } + +[package.optional-dependencies] +dev = [ + { name = "mypy" }, + { name = "pydantic" }, + { name = "pytest" }, + { name = "ruff" }, +] + +[package.metadata] +requires-dist = [ + { name = "mypy", marker = "extra == 'dev'", specifier = ">=1.18" }, + { name = "pydantic", marker = "extra == 'dev'", specifier = ">=2.7" }, + { name = "pytest", marker = "extra == 'dev'", specifier = ">=8.4" }, + { name = "ruff", marker = "extra == 'dev'", specifier = ">=0.5" }, +] +provides-extras = ["dev"] + +[[package]] +name = "packaging" +version = "25.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a1/d4/1fc4078c65507b51b96ca8f8c3ba19e6a61c8253c72794544580a7b6c24d/packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f", size = 165727, upload-time = "2025-04-19T11:48:59.673Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/20/12/38679034af332785aac8774540895e234f4d07f7545804097de4b666afd8/packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484", size = 66469, upload-time = "2025-04-19T11:48:57.875Z" }, +] + +[[package]] +name = "pathspec" +version = "0.12.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ca/bc/f35b8446f4531a7cb215605d100cd88b7ac6f44ab3fc94870c120ab3adbf/pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712", size = 51043, upload-time = "2023-12-10T22:30:45Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/cc/20/ff623b09d963f88bfde16306a54e12ee5ea43e9b597108672ff3a408aad6/pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08", size = 31191, upload-time = "2023-12-10T22:30:43.14Z" }, +] + +[[package]] +name = "pluggy" +version = "1.6.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f9/e2/3e91f31a7d2b083fe6ef3fa267035b518369d9511ffab804f839851d2779/pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3", size = 69412, upload-time = "2025-05-15T12:30:07.975Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538, upload-time = "2025-05-15T12:30:06.134Z" }, +] + +[[package]] +name = "pydantic" +version = "2.12.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "annotated-types" }, + { name = "pydantic-core" }, + { name = "typing-extensions" }, + { name = "typing-inspection" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/c3/da/b8a7ee04378a53f6fefefc0c5e05570a3ebfdfa0523a878bcd3b475683ee/pydantic-2.12.0.tar.gz", hash = "sha256:c1a077e6270dbfb37bfd8b498b3981e2bb18f68103720e51fa6c306a5a9af563", size = 814760, upload-time = "2025-10-07T15:58:03.467Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f4/9d/d5c855424e2e5b6b626fbc6ec514d8e655a600377ce283008b115abb7445/pydantic-2.12.0-py3-none-any.whl", hash = "sha256:f6a1da352d42790537e95e83a8bdfb91c7efbae63ffd0b86fa823899e807116f", size = 459730, upload-time = "2025-10-07T15:58:01.576Z" }, +] + +[[package]] +name 
= "pydantic-core" +version = "2.41.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/7d/14/12b4a0d2b0b10d8e1d9a24ad94e7bbb43335eaf29c0c4e57860e8a30734a/pydantic_core-2.41.1.tar.gz", hash = "sha256:1ad375859a6d8c356b7704ec0f547a58e82ee80bb41baa811ad710e124bc8f2f", size = 454870, upload-time = "2025-10-07T10:50:45.974Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ee/bc/5f520319ee1c9e25010412fac4154a72e0a40d0a19eb00281b1f200c0947/pydantic_core-2.41.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:db2f82c0ccbce8f021ad304ce35cbe02aa2f95f215cac388eed542b03b4d5eb4", size = 2099300, upload-time = "2025-10-06T21:10:30.463Z" }, + { url = "https://files.pythonhosted.org/packages/31/14/010cd64c5c3814fb6064786837ec12604be0dd46df3327cf8474e38abbbd/pydantic_core-2.41.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:47694a31c710ced9205d5f1e7e8af3ca57cbb8a503d98cb9e33e27c97a501601", size = 1910179, upload-time = "2025-10-06T21:10:31.782Z" }, + { url = "https://files.pythonhosted.org/packages/8e/2e/23fc2a8a93efad52df302fdade0a60f471ecc0c7aac889801ac24b4c07d6/pydantic_core-2.41.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:93e9decce94daf47baf9e9d392f5f2557e783085f7c5e522011545d9d6858e00", size = 1957225, upload-time = "2025-10-06T21:10:33.11Z" }, + { url = "https://files.pythonhosted.org/packages/b9/b6/6db08b2725b2432b9390844852e11d320281e5cea8a859c52c68001975fa/pydantic_core-2.41.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ab0adafdf2b89c8b84f847780a119437a0931eca469f7b44d356f2b426dd9741", size = 2053315, upload-time = "2025-10-06T21:10:34.87Z" }, + { url = "https://files.pythonhosted.org/packages/61/d9/4de44600f2d4514b44f3f3aeeda2e14931214b6b5bf52479339e801ce748/pydantic_core-2.41.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5da98cc81873f39fd56882e1569c4677940fbc12bce6213fad1ead784192d7c8", size = 2224298, upload-time = "2025-10-06T21:10:36.233Z" }, + { url = "https://files.pythonhosted.org/packages/7a/ae/dbe51187a7f35fc21b283c5250571a94e36373eb557c1cba9f29a9806dcf/pydantic_core-2.41.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:209910e88afb01fd0fd403947b809ba8dba0e08a095e1f703294fda0a8fdca51", size = 2351797, upload-time = "2025-10-06T21:10:37.601Z" }, + { url = "https://files.pythonhosted.org/packages/b5/a7/975585147457c2e9fb951c7c8dab56deeb6aa313f3aa72c2fc0df3f74a49/pydantic_core-2.41.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:365109d1165d78d98e33c5bfd815a9b5d7d070f578caefaabcc5771825b4ecb5", size = 2074921, upload-time = "2025-10-06T21:10:38.927Z" }, + { url = "https://files.pythonhosted.org/packages/62/37/ea94d1d0c01dec1b7d236c7cec9103baab0021f42500975de3d42522104b/pydantic_core-2.41.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:706abf21e60a2857acdb09502bc853ee5bce732955e7b723b10311114f033115", size = 2187767, upload-time = "2025-10-06T21:10:40.651Z" }, + { url = "https://files.pythonhosted.org/packages/d3/fe/694cf9fdd3a777a618c3afd210dba7b414cb8a72b1bd29b199c2e5765fee/pydantic_core-2.41.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:bf0bd5417acf7f6a7ec3b53f2109f587be176cb35f9cf016da87e6017437a72d", size = 2136062, upload-time = "2025-10-06T21:10:42.09Z" }, + { url = 
"https://files.pythonhosted.org/packages/0f/ae/174aeabd89916fbd2988cc37b81a59e1186e952afd2a7ed92018c22f31ca/pydantic_core-2.41.1-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:2e71b1c6ceb9c78424ae9f63a07292fb769fb890a4e7efca5554c47f33a60ea5", size = 2317819, upload-time = "2025-10-06T21:10:43.974Z" }, + { url = "https://files.pythonhosted.org/packages/65/e8/e9aecafaebf53fc456314f72886068725d6fba66f11b013532dc21259343/pydantic_core-2.41.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:80745b9770b4a38c25015b517451c817799bfb9d6499b0d13d8227ec941cb513", size = 2312267, upload-time = "2025-10-06T21:10:45.34Z" }, + { url = "https://files.pythonhosted.org/packages/35/2f/1c2e71d2a052f9bb2f2df5a6a05464a0eb800f9e8d9dd800202fe31219e1/pydantic_core-2.41.1-cp312-cp312-win32.whl", hash = "sha256:83b64d70520e7890453f1aa21d66fda44e7b35f1cfea95adf7b4289a51e2b479", size = 1990927, upload-time = "2025-10-06T21:10:46.738Z" }, + { url = "https://files.pythonhosted.org/packages/b1/78/562998301ff2588b9c6dcc5cb21f52fa919d6e1decc75a35055feb973594/pydantic_core-2.41.1-cp312-cp312-win_amd64.whl", hash = "sha256:377defd66ee2003748ee93c52bcef2d14fde48fe28a0b156f88c3dbf9bc49a50", size = 2034703, upload-time = "2025-10-06T21:10:48.524Z" }, + { url = "https://files.pythonhosted.org/packages/b2/53/d95699ce5a5cdb44bb470bd818b848b9beadf51459fd4ea06667e8ede862/pydantic_core-2.41.1-cp312-cp312-win_arm64.whl", hash = "sha256:c95caff279d49c1d6cdfe2996e6c2ad712571d3b9caaa209a404426c326c4bde", size = 1972719, upload-time = "2025-10-06T21:10:50.256Z" }, + { url = "https://files.pythonhosted.org/packages/27/8a/6d54198536a90a37807d31a156642aae7a8e1263ed9fe6fc6245defe9332/pydantic_core-2.41.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:70e790fce5f05204ef4403159857bfcd587779da78627b0babb3654f75361ebf", size = 2105825, upload-time = "2025-10-06T21:10:51.719Z" }, + { url = "https://files.pythonhosted.org/packages/4f/2e/4784fd7b22ac9c8439db25bf98ffed6853d01e7e560a346e8af821776ccc/pydantic_core-2.41.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:9cebf1ca35f10930612d60bd0f78adfacee824c30a880e3534ba02c207cceceb", size = 1910126, upload-time = "2025-10-06T21:10:53.145Z" }, + { url = "https://files.pythonhosted.org/packages/f3/92/31eb0748059ba5bd0aa708fb4bab9fcb211461ddcf9e90702a6542f22d0d/pydantic_core-2.41.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:170406a37a5bc82c22c3274616bf6f17cc7df9c4a0a0a50449e559cb755db669", size = 1961472, upload-time = "2025-10-06T21:10:55.754Z" }, + { url = "https://files.pythonhosted.org/packages/ab/91/946527792275b5c4c7dde4cfa3e81241bf6900e9fee74fb1ba43e0c0f1ab/pydantic_core-2.41.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:12d4257fc9187a0ccd41b8b327d6a4e57281ab75e11dda66a9148ef2e1fb712f", size = 2063230, upload-time = "2025-10-06T21:10:57.179Z" }, + { url = "https://files.pythonhosted.org/packages/31/5d/a35c5d7b414e5c0749f1d9f0d159ee2ef4bab313f499692896b918014ee3/pydantic_core-2.41.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a75a33b4db105dd1c8d57839e17ee12db8d5ad18209e792fa325dbb4baeb00f4", size = 2229469, upload-time = "2025-10-06T21:10:59.409Z" }, + { url = "https://files.pythonhosted.org/packages/21/4d/8713737c689afa57ecfefe38db78259d4484c97aa494979e6a9d19662584/pydantic_core-2.41.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:08a589f850803a74e0fcb16a72081cafb0d72a3cdda500106942b07e76b7bf62", size = 2347986, upload-time = "2025-10-06T21:11:00.847Z" 
}, + { url = "https://files.pythonhosted.org/packages/f6/ec/929f9a3a5ed5cda767081494bacd32f783e707a690ce6eeb5e0730ec4986/pydantic_core-2.41.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a97939d6ea44763c456bd8a617ceada2c9b96bb5b8ab3dfa0d0827df7619014", size = 2072216, upload-time = "2025-10-06T21:11:02.43Z" }, + { url = "https://files.pythonhosted.org/packages/26/55/a33f459d4f9cc8786d9db42795dbecc84fa724b290d7d71ddc3d7155d46a/pydantic_core-2.41.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d2ae423c65c556f09569524b80ffd11babff61f33055ef9773d7c9fabc11ed8d", size = 2193047, upload-time = "2025-10-06T21:11:03.787Z" }, + { url = "https://files.pythonhosted.org/packages/77/af/d5c6959f8b089f2185760a2779079e3c2c411bfc70ea6111f58367851629/pydantic_core-2.41.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:4dc703015fbf8764d6a8001c327a87f1823b7328d40b47ce6000c65918ad2b4f", size = 2140613, upload-time = "2025-10-06T21:11:05.607Z" }, + { url = "https://files.pythonhosted.org/packages/58/e5/2c19bd2a14bffe7fabcf00efbfbd3ac430aaec5271b504a938ff019ac7be/pydantic_core-2.41.1-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:968e4ffdfd35698a5fe659e5e44c508b53664870a8e61c8f9d24d3d145d30257", size = 2327641, upload-time = "2025-10-06T21:11:07.143Z" }, + { url = "https://files.pythonhosted.org/packages/93/ef/e0870ccda798c54e6b100aff3c4d49df5458fd64217e860cb9c3b0a403f4/pydantic_core-2.41.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:fff2b76c8e172d34771cd4d4f0ade08072385310f214f823b5a6ad4006890d32", size = 2318229, upload-time = "2025-10-06T21:11:08.73Z" }, + { url = "https://files.pythonhosted.org/packages/b1/4b/c3b991d95f5deb24d0bd52e47bcf716098fa1afe0ce2d4bd3125b38566ba/pydantic_core-2.41.1-cp313-cp313-win32.whl", hash = "sha256:a38a5263185407ceb599f2f035faf4589d57e73c7146d64f10577f6449e8171d", size = 1997911, upload-time = "2025-10-06T21:11:10.329Z" }, + { url = "https://files.pythonhosted.org/packages/a7/ce/5c316fd62e01f8d6be1b7ee6b54273214e871772997dc2c95e204997a055/pydantic_core-2.41.1-cp313-cp313-win_amd64.whl", hash = "sha256:b42ae7fd6760782c975897e1fdc810f483b021b32245b0105d40f6e7a3803e4b", size = 2034301, upload-time = "2025-10-06T21:11:12.113Z" }, + { url = "https://files.pythonhosted.org/packages/29/41/902640cfd6a6523194123e2c3373c60f19006447f2fb06f76de4e8466c5b/pydantic_core-2.41.1-cp313-cp313-win_arm64.whl", hash = "sha256:ad4111acc63b7384e205c27a2f15e23ac0ee21a9d77ad6f2e9cb516ec90965fb", size = 1977238, upload-time = "2025-10-06T21:11:14.1Z" }, + { url = "https://files.pythonhosted.org/packages/04/04/28b040e88c1b89d851278478842f0bdf39c7a05da9e850333c6c8cbe7dfa/pydantic_core-2.41.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:440d0df7415b50084a4ba9d870480c16c5f67c0d1d4d5119e3f70925533a0edc", size = 1875626, upload-time = "2025-10-06T21:11:15.69Z" }, + { url = "https://files.pythonhosted.org/packages/d6/58/b41dd3087505220bb58bc81be8c3e8cbc037f5710cd3c838f44f90bdd704/pydantic_core-2.41.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:71eaa38d342099405dae6484216dcf1e8e4b0bebd9b44a4e08c9b43db6a2ab67", size = 2045708, upload-time = "2025-10-06T21:11:17.258Z" }, + { url = "https://files.pythonhosted.org/packages/d7/b8/760f23754e40bf6c65b94a69b22c394c24058a0ef7e2aa471d2e39219c1a/pydantic_core-2.41.1-cp313-cp313t-win_amd64.whl", hash = "sha256:555ecf7e50f1161d3f693bc49f23c82cf6cdeafc71fa37a06120772a09a38795", size = 1997171, upload-time = "2025-10-06T21:11:18.822Z" }, + { url = 
"https://files.pythonhosted.org/packages/41/12/cec246429ddfa2778d2d6301eca5362194dc8749ecb19e621f2f65b5090f/pydantic_core-2.41.1-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:05226894a26f6f27e1deb735d7308f74ef5fa3a6de3e0135bb66cdcaee88f64b", size = 2107836, upload-time = "2025-10-06T21:11:20.432Z" }, + { url = "https://files.pythonhosted.org/packages/20/39/baba47f8d8b87081302498e610aefc37142ce6a1cc98b2ab6b931a162562/pydantic_core-2.41.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:85ff7911c6c3e2fd8d3779c50925f6406d770ea58ea6dde9c230d35b52b16b4a", size = 1904449, upload-time = "2025-10-06T21:11:22.185Z" }, + { url = "https://files.pythonhosted.org/packages/50/32/9a3d87cae2c75a5178334b10358d631bd094b916a00a5993382222dbfd92/pydantic_core-2.41.1-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:47f1f642a205687d59b52dc1a9a607f45e588f5a2e9eeae05edd80c7a8c47674", size = 1961750, upload-time = "2025-10-06T21:11:24.348Z" }, + { url = "https://files.pythonhosted.org/packages/27/42/a96c9d793a04cf2a9773bff98003bb154087b94f5530a2ce6063ecfec583/pydantic_core-2.41.1-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:df11c24e138876ace5ec6043e5cae925e34cf38af1a1b3d63589e8f7b5f5cdc4", size = 2063305, upload-time = "2025-10-06T21:11:26.556Z" }, + { url = "https://files.pythonhosted.org/packages/3e/8d/028c4b7d157a005b1f52c086e2d4b0067886b213c86220c1153398dbdf8f/pydantic_core-2.41.1-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7f0bf7f5c8f7bf345c527e8a0d72d6b26eda99c1227b0c34e7e59e181260de31", size = 2228959, upload-time = "2025-10-06T21:11:28.426Z" }, + { url = "https://files.pythonhosted.org/packages/08/f7/ee64cda8fcc9ca3f4716e6357144f9ee71166775df582a1b6b738bf6da57/pydantic_core-2.41.1-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:82b887a711d341c2c47352375d73b029418f55b20bd7815446d175a70effa706", size = 2345421, upload-time = "2025-10-06T21:11:30.226Z" }, + { url = "https://files.pythonhosted.org/packages/13/c0/e8ec05f0f5ee7a3656973ad9cd3bc73204af99f6512c1a4562f6fb4b3f7d/pydantic_core-2.41.1-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b5f1d5d6bbba484bdf220c72d8ecd0be460f4bd4c5e534a541bb2cd57589fb8b", size = 2065288, upload-time = "2025-10-06T21:11:32.019Z" }, + { url = "https://files.pythonhosted.org/packages/0a/25/d77a73ff24e2e4fcea64472f5e39b0402d836da9b08b5361a734d0153023/pydantic_core-2.41.1-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2bf1917385ebe0f968dc5c6ab1375886d56992b93ddfe6bf52bff575d03662be", size = 2189759, upload-time = "2025-10-06T21:11:33.753Z" }, + { url = "https://files.pythonhosted.org/packages/66/45/4a4ebaaae12a740552278d06fe71418c0f2869537a369a89c0e6723b341d/pydantic_core-2.41.1-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:4f94f3ab188f44b9a73f7295663f3ecb8f2e2dd03a69c8f2ead50d37785ecb04", size = 2140747, upload-time = "2025-10-06T21:11:35.781Z" }, + { url = "https://files.pythonhosted.org/packages/da/6d/b727ce1022f143194a36593243ff244ed5a1eb3c9122296bf7e716aa37ba/pydantic_core-2.41.1-cp314-cp314-musllinux_1_1_armv7l.whl", hash = "sha256:3925446673641d37c30bd84a9d597e49f72eacee8b43322c8999fa17d5ae5bc4", size = 2327416, upload-time = "2025-10-06T21:11:37.75Z" }, + { url = "https://files.pythonhosted.org/packages/6f/8c/02df9d8506c427787059f87c6c7253435c6895e12472a652d9616ee0fc95/pydantic_core-2.41.1-cp314-cp314-musllinux_1_1_x86_64.whl", hash = 
"sha256:49bd51cc27adb980c7b97357ae036ce9b3c4d0bb406e84fbe16fb2d368b602a8", size = 2318138, upload-time = "2025-10-06T21:11:39.463Z" }, + { url = "https://files.pythonhosted.org/packages/98/67/0cf429a7d6802536941f430e6e3243f6d4b68f41eeea4b242372f1901794/pydantic_core-2.41.1-cp314-cp314-win32.whl", hash = "sha256:a31ca0cd0e4d12ea0df0077df2d487fc3eb9d7f96bbb13c3c5b88dcc21d05159", size = 1998429, upload-time = "2025-10-06T21:11:41.989Z" }, + { url = "https://files.pythonhosted.org/packages/38/60/742fef93de5d085022d2302a6317a2b34dbfe15258e9396a535c8a100ae7/pydantic_core-2.41.1-cp314-cp314-win_amd64.whl", hash = "sha256:1b5c4374a152e10a22175d7790e644fbd8ff58418890e07e2073ff9d4414efae", size = 2028870, upload-time = "2025-10-06T21:11:43.66Z" }, + { url = "https://files.pythonhosted.org/packages/31/38/cdd8ccb8555ef7720bd7715899bd6cfbe3c29198332710e1b61b8f5dd8b8/pydantic_core-2.41.1-cp314-cp314-win_arm64.whl", hash = "sha256:4fee76d757639b493eb600fba668f1e17475af34c17dd61db7a47e824d464ca9", size = 1974275, upload-time = "2025-10-06T21:11:45.476Z" }, + { url = "https://files.pythonhosted.org/packages/e7/7e/8ac10ccb047dc0221aa2530ec3c7c05ab4656d4d4bd984ee85da7f3d5525/pydantic_core-2.41.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:f9b9c968cfe5cd576fdd7361f47f27adeb120517e637d1b189eea1c3ece573f4", size = 1875124, upload-time = "2025-10-06T21:11:47.591Z" }, + { url = "https://files.pythonhosted.org/packages/c3/e4/7d9791efeb9c7d97e7268f8d20e0da24d03438a7fa7163ab58f1073ba968/pydantic_core-2.41.1-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f1ebc7ab67b856384aba09ed74e3e977dded40e693de18a4f197c67d0d4e6d8e", size = 2043075, upload-time = "2025-10-06T21:11:49.542Z" }, + { url = "https://files.pythonhosted.org/packages/2d/c3/3f6e6b2342ac11ac8cd5cb56e24c7b14afa27c010e82a765ffa5f771884a/pydantic_core-2.41.1-cp314-cp314t-win_amd64.whl", hash = "sha256:8ae0dc57b62a762985bc7fbf636be3412394acc0ddb4ade07fe104230f1b9762", size = 1995341, upload-time = "2025-10-06T21:11:51.497Z" }, + { url = "https://files.pythonhosted.org/packages/2b/3e/a51c5f5d37b9288ba30683d6e96f10fa8f1defad1623ff09f1020973b577/pydantic_core-2.41.1-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:b04fa9ed049461a7398138c604b00550bc89e3e1151d84b81ad6dc93e39c4c06", size = 2115344, upload-time = "2025-10-07T10:50:02.466Z" }, + { url = "https://files.pythonhosted.org/packages/5a/bd/389504c9e0600ef4502cd5238396b527afe6ef8981a6a15cd1814fc7b434/pydantic_core-2.41.1-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:b3b7d9cfbfdc43c80a16638c6dc2768e3956e73031fca64e8e1a3ae744d1faeb", size = 1927994, upload-time = "2025-10-07T10:50:04.379Z" }, + { url = "https://files.pythonhosted.org/packages/ff/9c/5111c6b128861cb792a4c082677e90dac4f2e090bb2e2fe06aa5b2d39027/pydantic_core-2.41.1-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eec83fc6abef04c7f9bec616e2d76ee9a6a4ae2a359b10c21d0f680e24a247ca", size = 1959394, upload-time = "2025-10-07T10:50:06.335Z" }, + { url = "https://files.pythonhosted.org/packages/14/3f/cfec8b9a0c48ce5d64409ec5e1903cb0b7363da38f14b41de2fcb3712700/pydantic_core-2.41.1-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6771a2d9f83c4038dfad5970a3eef215940682b2175e32bcc817bdc639019b28", size = 2147365, upload-time = "2025-10-07T10:50:07.978Z" }, +] + +[[package]] +name = "pygments" +version = "2.19.2" +source = { registry = "https://pypi.org/simple" } +sdist = { 
url = "https://files.pythonhosted.org/packages/b0/77/a5b8c569bf593b0140bde72ea885a803b82086995367bf2037de0159d924/pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887", size = 4968631, upload-time = "2025-06-21T13:39:12.283Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b", size = 1225217, upload-time = "2025-06-21T13:39:07.939Z" }, +] + +[[package]] +name = "pytest" +version = "8.4.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, + { name = "iniconfig" }, + { name = "packaging" }, + { name = "pluggy" }, + { name = "pygments" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/a3/5c/00a0e072241553e1a7496d638deababa67c5058571567b92a7eaa258397c/pytest-8.4.2.tar.gz", hash = "sha256:86c0d0b93306b961d58d62a4db4879f27fe25513d4b969df351abdddb3c30e01", size = 1519618, upload-time = "2025-09-04T14:34:22.711Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a8/a4/20da314d277121d6534b3a980b29035dcd51e6744bd79075a6ce8fa4eb8d/pytest-8.4.2-py3-none-any.whl", hash = "sha256:872f880de3fc3a5bdc88a11b39c9710c3497a547cfa9320bc3c5e62fbf272e79", size = 365750, upload-time = "2025-09-04T14:34:20.226Z" }, +] + +[[package]] +name = "ruff" +version = "0.14.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/41/b9/9bd84453ed6dd04688de9b3f3a4146a1698e8faae2ceeccce4e14c67ae17/ruff-0.14.0.tar.gz", hash = "sha256:62ec8969b7510f77945df916de15da55311fade8d6050995ff7f680afe582c57", size = 5452071, upload-time = "2025-10-07T18:21:55.763Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3a/4e/79d463a5f80654e93fa653ebfb98e0becc3f0e7cf6219c9ddedf1e197072/ruff-0.14.0-py3-none-linux_armv6l.whl", hash = "sha256:58e15bffa7054299becf4bab8a1187062c6f8cafbe9f6e39e0d5aface455d6b3", size = 12494532, upload-time = "2025-10-07T18:21:00.373Z" }, + { url = "https://files.pythonhosted.org/packages/ee/40/e2392f445ed8e02aa6105d49db4bfff01957379064c30f4811c3bf38aece/ruff-0.14.0-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:838d1b065f4df676b7c9957992f2304e41ead7a50a568185efd404297d5701e8", size = 13160768, upload-time = "2025-10-07T18:21:04.73Z" }, + { url = "https://files.pythonhosted.org/packages/75/da/2a656ea7c6b9bd14c7209918268dd40e1e6cea65f4bb9880eaaa43b055cd/ruff-0.14.0-py3-none-macosx_11_0_arm64.whl", hash = "sha256:703799d059ba50f745605b04638fa7e9682cc3da084b2092feee63500ff3d9b8", size = 12363376, upload-time = "2025-10-07T18:21:07.833Z" }, + { url = "https://files.pythonhosted.org/packages/42/e2/1ffef5a1875add82416ff388fcb7ea8b22a53be67a638487937aea81af27/ruff-0.14.0-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ba9a8925e90f861502f7d974cc60e18ca29c72bb0ee8bfeabb6ade35a3abde7", size = 12608055, upload-time = "2025-10-07T18:21:10.72Z" }, + { url = "https://files.pythonhosted.org/packages/4a/32/986725199d7cee510d9f1dfdf95bf1efc5fa9dd714d0d85c1fb1f6be3bc3/ruff-0.14.0-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e41f785498bd200ffc276eb9e1570c019c1d907b07cfb081092c8ad51975bbe7", size = 12318544, upload-time = "2025-10-07T18:21:13.741Z" }, + { url = 
"https://files.pythonhosted.org/packages/9a/ed/4969cefd53315164c94eaf4da7cfba1f267dc275b0abdd593d11c90829a3/ruff-0.14.0-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:30a58c087aef4584c193aebf2700f0fbcfc1e77b89c7385e3139956fa90434e2", size = 14001280, upload-time = "2025-10-07T18:21:16.411Z" }, + { url = "https://files.pythonhosted.org/packages/ab/ad/96c1fc9f8854c37681c9613d825925c7f24ca1acfc62a4eb3896b50bacd2/ruff-0.14.0-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:f8d07350bc7af0a5ce8812b7d5c1a7293cf02476752f23fdfc500d24b79b783c", size = 15027286, upload-time = "2025-10-07T18:21:19.577Z" }, + { url = "https://files.pythonhosted.org/packages/b3/00/1426978f97df4fe331074baf69615f579dc4e7c37bb4c6f57c2aad80c87f/ruff-0.14.0-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:eec3bbbf3a7d5482b5c1f42d5fc972774d71d107d447919fca620b0be3e3b75e", size = 14451506, upload-time = "2025-10-07T18:21:22.779Z" }, + { url = "https://files.pythonhosted.org/packages/58/d5/9c1cea6e493c0cf0647674cca26b579ea9d2a213b74b5c195fbeb9678e15/ruff-0.14.0-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:16b68e183a0e28e5c176d51004aaa40559e8f90065a10a559176713fcf435206", size = 13437384, upload-time = "2025-10-07T18:21:25.758Z" }, + { url = "https://files.pythonhosted.org/packages/29/b4/4cd6a4331e999fc05d9d77729c95503f99eae3ba1160469f2b64866964e3/ruff-0.14.0-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eb732d17db2e945cfcbbc52af0143eda1da36ca8ae25083dd4f66f1542fdf82e", size = 13447976, upload-time = "2025-10-07T18:21:28.83Z" }, + { url = "https://files.pythonhosted.org/packages/3b/c0/ac42f546d07e4f49f62332576cb845d45c67cf5610d1851254e341d563b6/ruff-0.14.0-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:c958f66ab884b7873e72df38dcabee03d556a8f2ee1b8538ee1c2bbd619883dd", size = 13682850, upload-time = "2025-10-07T18:21:31.842Z" }, + { url = "https://files.pythonhosted.org/packages/5f/c4/4b0c9bcadd45b4c29fe1af9c5d1dc0ca87b4021665dfbe1c4688d407aa20/ruff-0.14.0-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:7eb0499a2e01f6e0c285afc5bac43ab380cbfc17cd43a2e1dd10ec97d6f2c42d", size = 12449825, upload-time = "2025-10-07T18:21:35.074Z" }, + { url = "https://files.pythonhosted.org/packages/4b/a8/e2e76288e6c16540fa820d148d83e55f15e994d852485f221b9524514730/ruff-0.14.0-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:4c63b2d99fafa05efca0ab198fd48fa6030d57e4423df3f18e03aa62518c565f", size = 12272599, upload-time = "2025-10-07T18:21:38.08Z" }, + { url = "https://files.pythonhosted.org/packages/18/14/e2815d8eff847391af632b22422b8207704222ff575dec8d044f9ab779b2/ruff-0.14.0-py3-none-musllinux_1_2_i686.whl", hash = "sha256:668fce701b7a222f3f5327f86909db2bbe99c30877c8001ff934c5413812ac02", size = 13193828, upload-time = "2025-10-07T18:21:41.216Z" }, + { url = "https://files.pythonhosted.org/packages/44/c6/61ccc2987cf0aecc588ff8f3212dea64840770e60d78f5606cd7dc34de32/ruff-0.14.0-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:a86bf575e05cb68dcb34e4c7dfe1064d44d3f0c04bbc0491949092192b515296", size = 13628617, upload-time = "2025-10-07T18:21:44.04Z" }, + { url = "https://files.pythonhosted.org/packages/73/e6/03b882225a1b0627e75339b420883dc3c90707a8917d2284abef7a58d317/ruff-0.14.0-py3-none-win32.whl", hash = "sha256:7450a243d7125d1c032cb4b93d9625dea46c8c42b4f06c6b709baac168e10543", size = 12367872, upload-time = "2025-10-07T18:21:46.67Z" }, + { url = 
"https://files.pythonhosted.org/packages/41/77/56cf9cf01ea0bfcc662de72540812e5ba8e9563f33ef3d37ab2174892c47/ruff-0.14.0-py3-none-win_amd64.whl", hash = "sha256:ea95da28cd874c4d9c922b39381cbd69cb7e7b49c21b8152b014bd4f52acddc2", size = 13464628, upload-time = "2025-10-07T18:21:50.318Z" }, + { url = "https://files.pythonhosted.org/packages/c6/2a/65880dfd0e13f7f13a775998f34703674a4554906167dce02daf7865b954/ruff-0.14.0-py3-none-win_arm64.whl", hash = "sha256:f42c9495f5c13ff841b1da4cb3c2a42075409592825dada7c5885c2c844ac730", size = 12565142, upload-time = "2025-10-07T18:21:53.577Z" }, +] + +[[package]] +name = "typing-extensions" +version = "4.15.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/72/94/1a15dd82efb362ac84269196e94cf00f187f7ed21c242792a923cdb1c61f/typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466", size = 109391, upload-time = "2025-08-25T13:49:26.313Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548", size = 44614, upload-time = "2025-08-25T13:49:24.86Z" }, +] + +[[package]] +name = "typing-inspection" +version = "0.4.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/55/e3/70399cb7dd41c10ac53367ae42139cf4b1ca5f36bb3dc6c9d33acdb43655/typing_inspection-0.4.2.tar.gz", hash = "sha256:ba561c48a67c5958007083d386c3295464928b01faa735ab8547c5692e87f464", size = 75949, upload-time = "2025-10-01T02:14:41.687Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/dc/9b/47798a6c91d8bdb567fe2698fe81e0c6b7cb7ef4d13da4114b41d239f65d/typing_inspection-0.4.2-py3-none-any.whl", hash = "sha256:4ed1cacbdc298c220f1bd249ed5287caa16f34d44ef4e9c3d0cbad5b521545e7", size = 14611, upload-time = "2025-10-01T02:14:40.154Z" }, +]