diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index bb15a0e..0fb87ec 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -1,6 +1,11 @@
-name: Testing package
+name: Testing taskiq-valkey
 
-on: push
+on:
+  push:
+    branches:
+      - main
+      - develop
+  pull_request:
 
 jobs:
   lint:
@@ -32,10 +37,11 @@ jobs:
     strategy:
       matrix:
         py_version: ["3.9", "3.10", "3.11", "3.12", "3.13"]
-        os: [ubuntu-latest, windows-latest]
-    runs-on: "${{ matrix.os }}"
+    runs-on: "ubuntu-latest"
     steps:
       - uses: actions/checkout@v4
+      - name: Set up Valkey instance and Valkey cluster
+        run: docker compose up -d
       - name: Install poetry
         run: pipx install poetry
       - name: Set up Python
@@ -51,7 +57,7 @@ jobs:
         run: poetry run coverage xml
       - name: Upload coverage reports to Codecov with GitHub Action
         uses: codecov/codecov-action@v3
-        if: matrix.os == 'ubuntu-latest' && matrix.py_version == '3.11'
+        if: matrix.py_version == '3.11'
         with:
           token: ${{ secrets.CODECOV_TOKEN }}
           fail_ci_if_error: false
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 5aaef28..e40a1be 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -2,7 +2,7 @@
 # See https://pre-commit.com/hooks.html for more hooks
 repos:
   - repo: https://github.com/pre-commit/pre-commit-hooks
-    rev: v2.4.0
+    rev: v5.0.0
     hooks:
       - id: check-ast
       - id: trailing-whitespace
diff --git a/README.md b/README.md
index 7842522..2274fba 100644
--- a/README.md
+++ b/README.md
@@ -1 +1,134 @@
-# taskiq_valkey
+# TaskIQ-Valkey
+
+Taskiq-valkey is a plugin for taskiq that adds brokers and a result backend based on valkey.
+
+# Installation
+
+To use this project you must have the core taskiq library installed:
+```bash
+pip install taskiq
+```
+This project can be installed using pip:
+```bash
+pip install taskiq-valkey
+```
+
+# Usage
+
+Let's look at an example with the valkey broker and the valkey async result backend:
+
+```python
+# broker.py
+import asyncio
+
+from taskiq_valkey import ValkeyAsyncResultBackend, ValkeyStreamBroker
+
+result_backend = ValkeyAsyncResultBackend(
+    valkey_url="valkey://localhost:6379",
+)
+
+# Or you can use PubSubBroker if you need broadcasting
+broker = ValkeyStreamBroker(
+    valkey_url="valkey://localhost:6379",
+).with_result_backend(result_backend)
+
+
+@broker.task
+async def best_task_ever() -> None:
+    """Solve all problems in the world."""
+    await asyncio.sleep(5.5)
+    print("All problems are solved!")
+
+
+async def main():
+    task = await best_task_ever.kiq()
+    print(await task.wait_result())
+
+
+if __name__ == "__main__":
+    asyncio.run(main())
+```
+
+Launch the workers:
+`taskiq worker broker:broker`
+Then run the main code:
+`python3 broker.py`
+
+
+## Brokers
+
+This package contains 6 broker implementations. We have two broker types: `PubSub` and `Stream`.
+
+Each type is implemented for each valkey architecture:
+* Single node
+* Cluster
+* Sentinel
+
+Here's a small breakdown of how they differ from each other.
+
+
+### PubSub
+
+On older valkey versions, PUBSUB was the usual way of turning valkey into a queue.
+However, using PUBSUB means that every message is delivered to all subscribed consumers.
+
+> [!WARNING]
+> This broker doesn't support acknowledgements. If a worker is suddenly killed while
+> processing a message, that message is lost.
+
+### Stream
+
+Stream brokers use the valkey [stream type](https://valkey.io/topics/streams-intro/) to store and fetch messages.
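+
+As a rough sketch (reusing the `ValkeyStreamBroker` constructor from the Usage section above; no other
+parameters are assumed here), a stream broker can also be used on its own when you don't need results:
+
+```python
+from taskiq_valkey import ValkeyStreamBroker
+
+# Unlike the PubSub broker, messages are kept in a valkey stream
+# until a worker acknowledges them.
+broker = ValkeyStreamBroker(valkey_url="valkey://localhost:6379")
+
+
+@broker.task
+async def durable_task() -> None:
+    """A task whose message stays in the stream until it is acknowledged."""
+```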
+
+> [!TIP]
+> This broker **supports** acknowledgements and is therefore safe to use in cases where data durability
+> is required.
+
+## ValkeyAsyncResultBackend configuration
+
+ValkeyAsyncResultBackend parameters:
+* `valkey_url` - URL of the valkey server.
+* `keep_results` - if set, results are not removed from Valkey after they are read.
+* `result_ex_time` - expiration time in seconds (not set by default).
+* `result_px_time` - expiration time in milliseconds (not set by default).
+* Any other keyword arguments are passed to `valkey.asyncio.BlockingConnectionPool`.
+  Notably, you can use `timeout` to set a custom timeout in seconds for reconnects
+  (or set it to `None` to retry reconnects indefinitely).
+
+> [!WARNING]
+> **It is highly recommended to set an expiration time in ValkeyAsyncResultBackend.**
+> If you want results to expire, either `result_ex_time` or `result_px_time` must be set.
+> ```python
+> # First variant
+> valkey_async_result = ValkeyAsyncResultBackend(
+>     valkey_url="valkey://localhost:6379",
+>     result_ex_time=1000,
+> )
+>
+> # Second variant
+> valkey_async_result = ValkeyAsyncResultBackend(
+>     valkey_url="valkey://localhost:6379",
+>     result_px_time=1000000,
+> )
+> ```
+
+
+## Schedule sources
+
+You can use this package to add dynamic schedule sources. They are used to store
+schedules for the taskiq scheduler.
+
+The advantage of using the schedule sources from this package over the default `LabelBased` source is that
+you can add schedules to them dynamically.
+
+For now there is only one schedule source - `ListValkeyScheduleSource`.
+
+### ListValkeyScheduleSource
+
+This source holds schedules in lists.
+
+* For cron tasks it uses the key `{prefix}:cron`.
+* For timed schedules it uses the key `{prefix}:time:{time}`, where `{time}` is the time at which the schedules should run.
+
+The main advantage of this approach is that we only fetch the tasks that need to run at a given time and do not make any excessive calls to valkey.
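+
+For illustration, here is a sketch of wiring this source into a scheduler. `TaskiqScheduler` comes from the
+core taskiq library; the import path and constructor arguments of `ListValkeyScheduleSource` (the valkey URL
+and the `prefix` keyword below) are assumptions based on the key layout described above, so check the package
+API before copying this:
+
+```python
+# scheduler.py
+from taskiq import TaskiqScheduler
+
+from taskiq_valkey import ListValkeyScheduleSource, ValkeyStreamBroker
+
+broker = ValkeyStreamBroker(valkey_url="valkey://localhost:6379")
+
+# Schedules are stored in the "{prefix}:cron" and "{prefix}:time:{time}" lists.
+# The constructor arguments here are illustrative assumptions.
+source = ListValkeyScheduleSource(
+    "valkey://localhost:6379",
+    prefix="schedule",
+)
+
+scheduler = TaskiqScheduler(broker=broker, sources=[source])
+```
+
+The scheduler can then be started with the standard taskiq command:
+`taskiq scheduler scheduler:scheduler`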
diff --git a/docker-compose.yml b/docker-compose.yml new file mode 100644 index 0000000..8f5b2ac --- /dev/null +++ b/docker-compose.yml @@ -0,0 +1,85 @@ +services: + valkey: + image: bitnami/valkey:7.2.7 + environment: + ALLOW_EMPTY_PASSWORD: "yes" + healthcheck: + test: ["CMD", "valkey-cli", "ping"] + interval: 5s + timeout: 5s + retries: 3 + start_period: 10s + ports: + - 7000:6379 + valkey-node-0: &valkey-node + image: docker.io/bitnami/valkey-cluster:7.2.7 + environment: + ALLOW_EMPTY_PASSWORD: "yes" + VALKEY_NODES: "valkey-node-0 valkey-node-1 valkey-node-2 valkey-node-3 valkey-node-4 valkey-node-5" + healthcheck: + test: ["CMD", "valkey-cli", "ping"] + interval: 5s + timeout: 5s + retries: 3 + start_period: 10s + + valkey-node-1: + <<: *valkey-node + + valkey-node-2: + <<: *valkey-node + + valkey-node-3: + <<: *valkey-node + + valkey-node-4: + <<: *valkey-node + + valkey-node-5: + image: docker.io/bitnami/valkey-cluster:7.2.7 + depends_on: + - valkey-node-0 + - valkey-node-1 + - valkey-node-2 + - valkey-node-3 + - valkey-node-4 + environment: + ALLOW_EMPTY_PASSWORD: "yes" + VALKEY_NODES: "valkey-node-0 valkey-node-1 valkey-node-2 valkey-node-3 valkey-node-4 valkey-node-5" + VALKEY_CLUSTER_REPLICAS: 1 + VALKEY_CLUSTER_CREATOR: "yes" + healthcheck: + test: ["CMD", "valkey-cli", "ping"] + interval: 5s + timeout: 5s + retries: 3 + start_period: 10s + ports: + - 7001:6379 + + valkey-master: + image: bitnami/valkey:7.2.7 + environment: + ALLOW_EMPTY_PASSWORD: "yes" + healthcheck: + test: ["CMD", "valkey-cli", "ping"] + interval: 5s + timeout: 5s + retries: 3 + start_period: 10s + + valkey-sentinel: + image: bitnami/valkey-sentinel:7.2.7 + depends_on: + - valkey-master + environment: + ALLOW_EMPTY_PASSWORD: "yes" + VALKEY_MASTER_HOST: "valkey-master" + healthcheck: + test: ["CMD", "valkey-cli", "-p", "26379", "ping"] + interval: 5s + timeout: 5s + retries: 3 + start_period: 10s + ports: + - 7002:26379 diff --git a/poetry.lock b/poetry.lock index fe5b24e..1cb522a 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 2.0.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 2.1.2 and should not be changed by hand. 
[[package]] name = "annotated-types" @@ -14,14 +14,14 @@ files = [ [[package]] name = "anyio" -version = "4.8.0" +version = "4.9.0" description = "High level compatibility layer for multiple asynchronous event loop implementations" optional = false python-versions = ">=3.9" -groups = ["main", "dev"] +groups = ["main"] files = [ - {file = "anyio-4.8.0-py3-none-any.whl", hash = "sha256:b5011f270ab5eb0abf13385f851315585cc37ef330dd88e27ec3d34d651fd47a"}, - {file = "anyio-4.8.0.tar.gz", hash = "sha256:1d9fe889df5212298c0c0723fa20479d1b94883a2df44bd3897aa91083316f7a"}, + {file = "anyio-4.9.0-py3-none-any.whl", hash = "sha256:9f76d541cad6e36af7beb62e978876f3b41e3e04f2c1fbf0884604c0a9c4d93c"}, + {file = "anyio-4.9.0.tar.gz", hash = "sha256:673c0c244e15788651a4ff38710fea9675823028a6f08a5eda409e0c9840a028"}, ] [package.dependencies] @@ -31,10 +31,23 @@ sniffio = ">=1.1" typing_extensions = {version = ">=4.5", markers = "python_version < \"3.13\""} [package.extras] -doc = ["Sphinx (>=7.4,<8.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx_rtd_theme"] -test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "trustme", "truststore (>=0.9.1)", "uvloop (>=0.21)"] +doc = ["Sphinx (>=8.2,<9.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx_rtd_theme"] +test = ["anyio[trio]", "blockbuster (>=1.5.23)", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "trustme", "truststore (>=0.9.1) ; python_version >= \"3.10\"", "uvloop (>=0.21) ; platform_python_implementation == \"CPython\" and platform_system != \"Windows\" and python_version < \"3.14\""] trio = ["trio (>=0.26.1)"] +[[package]] +name = "async-timeout" +version = "5.0.1" +description = "Timeout context manager for asyncio programs" +optional = false +python-versions = ">=3.8" +groups = ["main"] +markers = "python_version < \"3.11.3\"" +files = [ + {file = "async_timeout-5.0.1-py3-none-any.whl", hash = "sha256:39e3809566ff85354557ec2398b55e096c8364bacac9405a7a1fa429e77fe76c"}, + {file = "async_timeout-5.0.1.tar.gz", hash = "sha256:d9321a7a3d5a6a5e187e824d2fa0793ce379a202935782d555d6e9d2735677d3"}, +] + [[package]] name = "black" version = "25.1.0" @@ -124,82 +137,82 @@ files = [ [[package]] name = "coverage" -version = "7.6.12" +version = "7.8.0" description = "Code coverage measurement for Python" optional = false python-versions = ">=3.9" groups = ["dev"] files = [ - {file = "coverage-7.6.12-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:704c8c8c6ce6569286ae9622e534b4f5b9759b6f2cd643f1c1a61f666d534fe8"}, - {file = "coverage-7.6.12-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ad7525bf0241e5502168ae9c643a2f6c219fa0a283001cee4cf23a9b7da75879"}, - {file = "coverage-7.6.12-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:06097c7abfa611c91edb9e6920264e5be1d6ceb374efb4986f38b09eed4cb2fe"}, - {file = "coverage-7.6.12-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:220fa6c0ad7d9caef57f2c8771918324563ef0d8272c94974717c3909664e674"}, - {file = "coverage-7.6.12-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3688b99604a24492bcfe1c106278c45586eb819bf66a654d8a9a1433022fb2eb"}, - {file = "coverage-7.6.12-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d1a987778b9c71da2fc8948e6f2656da6ef68f59298b7e9786849634c35d2c3c"}, - {file = 
"coverage-7.6.12-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:cec6b9ce3bd2b7853d4a4563801292bfee40b030c05a3d29555fd2a8ee9bd68c"}, - {file = "coverage-7.6.12-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ace9048de91293e467b44bce0f0381345078389814ff6e18dbac8fdbf896360e"}, - {file = "coverage-7.6.12-cp310-cp310-win32.whl", hash = "sha256:ea31689f05043d520113e0552f039603c4dd71fa4c287b64cb3606140c66f425"}, - {file = "coverage-7.6.12-cp310-cp310-win_amd64.whl", hash = "sha256:676f92141e3c5492d2a1596d52287d0d963df21bf5e55c8b03075a60e1ddf8aa"}, - {file = "coverage-7.6.12-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e18aafdfb3e9ec0d261c942d35bd7c28d031c5855dadb491d2723ba54f4c3015"}, - {file = "coverage-7.6.12-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:66fe626fd7aa5982cdebad23e49e78ef7dbb3e3c2a5960a2b53632f1f703ea45"}, - {file = "coverage-7.6.12-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ef01d70198431719af0b1f5dcbefc557d44a190e749004042927b2a3fed0702"}, - {file = "coverage-7.6.12-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07e92ae5a289a4bc4c0aae710c0948d3c7892e20fd3588224ebe242039573bf0"}, - {file = "coverage-7.6.12-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e695df2c58ce526eeab11a2e915448d3eb76f75dffe338ea613c1201b33bab2f"}, - {file = "coverage-7.6.12-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d74c08e9aaef995f8c4ef6d202dbd219c318450fe2a76da624f2ebb9c8ec5d9f"}, - {file = "coverage-7.6.12-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e995b3b76ccedc27fe4f477b349b7d64597e53a43fc2961db9d3fbace085d69d"}, - {file = "coverage-7.6.12-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b1f097878d74fe51e1ddd1be62d8e3682748875b461232cf4b52ddc6e6db0bba"}, - {file = "coverage-7.6.12-cp311-cp311-win32.whl", hash = "sha256:1f7ffa05da41754e20512202c866d0ebfc440bba3b0ed15133070e20bf5aeb5f"}, - {file = "coverage-7.6.12-cp311-cp311-win_amd64.whl", hash = "sha256:e216c5c45f89ef8971373fd1c5d8d1164b81f7f5f06bbf23c37e7908d19e8558"}, - {file = "coverage-7.6.12-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b172f8e030e8ef247b3104902cc671e20df80163b60a203653150d2fc204d1ad"}, - {file = "coverage-7.6.12-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:641dfe0ab73deb7069fb972d4d9725bf11c239c309ce694dd50b1473c0f641c3"}, - {file = "coverage-7.6.12-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0e549f54ac5f301e8e04c569dfdb907f7be71b06b88b5063ce9d6953d2d58574"}, - {file = "coverage-7.6.12-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:959244a17184515f8c52dcb65fb662808767c0bd233c1d8a166e7cf74c9ea985"}, - {file = "coverage-7.6.12-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bda1c5f347550c359f841d6614fb8ca42ae5cb0b74d39f8a1e204815ebe25750"}, - {file = "coverage-7.6.12-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1ceeb90c3eda1f2d8c4c578c14167dbd8c674ecd7d38e45647543f19839dd6ea"}, - {file = "coverage-7.6.12-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0f16f44025c06792e0fb09571ae454bcc7a3ec75eeb3c36b025eccf501b1a4c3"}, - {file = "coverage-7.6.12-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b076e625396e787448d27a411aefff867db2bffac8ed04e8f7056b07024eed5a"}, - {file = "coverage-7.6.12-cp312-cp312-win32.whl", hash = 
"sha256:00b2086892cf06c7c2d74983c9595dc511acca00665480b3ddff749ec4fb2a95"}, - {file = "coverage-7.6.12-cp312-cp312-win_amd64.whl", hash = "sha256:7ae6eabf519bc7871ce117fb18bf14e0e343eeb96c377667e3e5dd12095e0288"}, - {file = "coverage-7.6.12-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:488c27b3db0ebee97a830e6b5a3ea930c4a6e2c07f27a5e67e1b3532e76b9ef1"}, - {file = "coverage-7.6.12-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5d1095bbee1851269f79fd8e0c9b5544e4c00c0c24965e66d8cba2eb5bb535fd"}, - {file = "coverage-7.6.12-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0533adc29adf6a69c1baa88c3d7dbcaadcffa21afbed3ca7a225a440e4744bf9"}, - {file = "coverage-7.6.12-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:53c56358d470fa507a2b6e67a68fd002364d23c83741dbc4c2e0680d80ca227e"}, - {file = "coverage-7.6.12-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64cbb1a3027c79ca6310bf101014614f6e6e18c226474606cf725238cf5bc2d4"}, - {file = "coverage-7.6.12-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:79cac3390bfa9836bb795be377395f28410811c9066bc4eefd8015258a7578c6"}, - {file = "coverage-7.6.12-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:9b148068e881faa26d878ff63e79650e208e95cf1c22bd3f77c3ca7b1d9821a3"}, - {file = "coverage-7.6.12-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8bec2ac5da793c2685ce5319ca9bcf4eee683b8a1679051f8e6ec04c4f2fd7dc"}, - {file = "coverage-7.6.12-cp313-cp313-win32.whl", hash = "sha256:200e10beb6ddd7c3ded322a4186313d5ca9e63e33d8fab4faa67ef46d3460af3"}, - {file = "coverage-7.6.12-cp313-cp313-win_amd64.whl", hash = "sha256:2b996819ced9f7dbb812c701485d58f261bef08f9b85304d41219b1496b591ef"}, - {file = "coverage-7.6.12-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:299cf973a7abff87a30609879c10df0b3bfc33d021e1adabc29138a48888841e"}, - {file = "coverage-7.6.12-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:4b467a8c56974bf06e543e69ad803c6865249d7a5ccf6980457ed2bc50312703"}, - {file = "coverage-7.6.12-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2458f275944db8129f95d91aee32c828a408481ecde3b30af31d552c2ce284a0"}, - {file = "coverage-7.6.12-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a9d8be07fb0832636a0f72b80d2a652fe665e80e720301fb22b191c3434d924"}, - {file = "coverage-7.6.12-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:14d47376a4f445e9743f6c83291e60adb1b127607a3618e3185bbc8091f0467b"}, - {file = "coverage-7.6.12-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:b95574d06aa9d2bd6e5cc35a5bbe35696342c96760b69dc4287dbd5abd4ad51d"}, - {file = "coverage-7.6.12-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:ecea0c38c9079570163d663c0433a9af4094a60aafdca491c6a3d248c7432827"}, - {file = "coverage-7.6.12-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:2251fabcfee0a55a8578a9d29cecfee5f2de02f11530e7d5c5a05859aa85aee9"}, - {file = "coverage-7.6.12-cp313-cp313t-win32.whl", hash = "sha256:eb5507795caabd9b2ae3f1adc95f67b1104971c22c624bb354232d65c4fc90b3"}, - {file = "coverage-7.6.12-cp313-cp313t-win_amd64.whl", hash = "sha256:f60a297c3987c6c02ffb29effc70eadcbb412fe76947d394a1091a3615948e2f"}, - {file = "coverage-7.6.12-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:e7575ab65ca8399c8c4f9a7d61bbd2d204c8b8e447aab9d355682205c9dd948d"}, - {file = "coverage-7.6.12-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8161d9fbc7e9fe2326de89cd0abb9f3599bccc1287db0aba285cb68d204ce929"}, - {file = "coverage-7.6.12-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3a1e465f398c713f1b212400b4e79a09829cd42aebd360362cd89c5bdc44eb87"}, - {file = "coverage-7.6.12-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f25d8b92a4e31ff1bd873654ec367ae811b3a943583e05432ea29264782dc32c"}, - {file = "coverage-7.6.12-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1a936309a65cc5ca80fa9f20a442ff9e2d06927ec9a4f54bcba9c14c066323f2"}, - {file = "coverage-7.6.12-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:aa6f302a3a0b5f240ee201297fff0bbfe2fa0d415a94aeb257d8b461032389bd"}, - {file = "coverage-7.6.12-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:f973643ef532d4f9be71dd88cf7588936685fdb576d93a79fe9f65bc337d9d73"}, - {file = "coverage-7.6.12-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:78f5243bb6b1060aed6213d5107744c19f9571ec76d54c99cc15938eb69e0e86"}, - {file = "coverage-7.6.12-cp39-cp39-win32.whl", hash = "sha256:69e62c5034291c845fc4df7f8155e8544178b6c774f97a99e2734b05eb5bed31"}, - {file = "coverage-7.6.12-cp39-cp39-win_amd64.whl", hash = "sha256:b01a840ecc25dce235ae4c1b6a0daefb2a203dba0e6e980637ee9c2f6ee0df57"}, - {file = "coverage-7.6.12-pp39.pp310-none-any.whl", hash = "sha256:7e39e845c4d764208e7b8f6a21c541ade741e2c41afabdfa1caa28687a3c98cf"}, - {file = "coverage-7.6.12-py3-none-any.whl", hash = "sha256:eb8668cfbc279a536c633137deeb9435d2962caec279c3f8cf8b91fff6ff8953"}, - {file = "coverage-7.6.12.tar.gz", hash = "sha256:48cfc4641d95d34766ad41d9573cc0f22a48aa88d22657a1fe01dca0dbae4de2"}, + {file = "coverage-7.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2931f66991175369859b5fd58529cd4b73582461877ecfd859b6549869287ffe"}, + {file = "coverage-7.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:52a523153c568d2c0ef8826f6cc23031dc86cffb8c6aeab92c4ff776e7951b28"}, + {file = "coverage-7.8.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c8a5c139aae4c35cbd7cadca1df02ea8cf28a911534fc1b0456acb0b14234f3"}, + {file = "coverage-7.8.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5a26c0c795c3e0b63ec7da6efded5f0bc856d7c0b24b2ac84b4d1d7bc578d676"}, + {file = "coverage-7.8.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:821f7bcbaa84318287115d54becb1915eece6918136c6f91045bb84e2f88739d"}, + {file = "coverage-7.8.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a321c61477ff8ee705b8a5fed370b5710c56b3a52d17b983d9215861e37b642a"}, + {file = "coverage-7.8.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:ed2144b8a78f9d94d9515963ed273d620e07846acd5d4b0a642d4849e8d91a0c"}, + {file = "coverage-7.8.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:042e7841a26498fff7a37d6fda770d17519982f5b7d8bf5278d140b67b61095f"}, + {file = "coverage-7.8.0-cp310-cp310-win32.whl", hash = "sha256:f9983d01d7705b2d1f7a95e10bbe4091fabc03a46881a256c2787637b087003f"}, + {file = "coverage-7.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:5a570cd9bd20b85d1a0d7b009aaf6c110b52b5755c17be6962f8ccd65d1dbd23"}, + {file = "coverage-7.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:e7ac22a0bb2c7c49f441f7a6d46c9c80d96e56f5a8bc6972529ed43c8b694e27"}, + {file = "coverage-7.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bf13d564d310c156d1c8e53877baf2993fb3073b2fc9f69790ca6a732eb4bfea"}, + {file = "coverage-7.8.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5761c70c017c1b0d21b0815a920ffb94a670c8d5d409d9b38857874c21f70d7"}, + {file = "coverage-7.8.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e5ff52d790c7e1628241ffbcaeb33e07d14b007b6eb00a19320c7b8a7024c040"}, + {file = "coverage-7.8.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d39fc4817fd67b3915256af5dda75fd4ee10621a3d484524487e33416c6f3543"}, + {file = "coverage-7.8.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:b44674870709017e4b4036e3d0d6c17f06a0e6d4436422e0ad29b882c40697d2"}, + {file = "coverage-7.8.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8f99eb72bf27cbb167b636eb1726f590c00e1ad375002230607a844d9e9a2318"}, + {file = "coverage-7.8.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b571bf5341ba8c6bc02e0baeaf3b061ab993bf372d982ae509807e7f112554e9"}, + {file = "coverage-7.8.0-cp311-cp311-win32.whl", hash = "sha256:e75a2ad7b647fd8046d58c3132d7eaf31b12d8a53c0e4b21fa9c4d23d6ee6d3c"}, + {file = "coverage-7.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:3043ba1c88b2139126fc72cb48574b90e2e0546d4c78b5299317f61b7f718b78"}, + {file = "coverage-7.8.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:bbb5cc845a0292e0c520656d19d7ce40e18d0e19b22cb3e0409135a575bf79fc"}, + {file = "coverage-7.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4dfd9a93db9e78666d178d4f08a5408aa3f2474ad4d0e0378ed5f2ef71640cb6"}, + {file = "coverage-7.8.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f017a61399f13aa6d1039f75cd467be388d157cd81f1a119b9d9a68ba6f2830d"}, + {file = "coverage-7.8.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0915742f4c82208ebf47a2b154a5334155ed9ef9fe6190674b8a46c2fb89cb05"}, + {file = "coverage-7.8.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a40fcf208e021eb14b0fac6bdb045c0e0cab53105f93ba0d03fd934c956143a"}, + {file = "coverage-7.8.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a1f406a8e0995d654b2ad87c62caf6befa767885301f3b8f6f73e6f3c31ec3a6"}, + {file = "coverage-7.8.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:77af0f6447a582fdc7de5e06fa3757a3ef87769fbb0fdbdeba78c23049140a47"}, + {file = "coverage-7.8.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:f2d32f95922927186c6dbc8bc60df0d186b6edb828d299ab10898ef3f40052fe"}, + {file = "coverage-7.8.0-cp312-cp312-win32.whl", hash = "sha256:769773614e676f9d8e8a0980dd7740f09a6ea386d0f383db6821df07d0f08545"}, + {file = "coverage-7.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:e5d2b9be5b0693cf21eb4ce0ec8d211efb43966f6657807f6859aab3814f946b"}, + {file = "coverage-7.8.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:5ac46d0c2dd5820ce93943a501ac5f6548ea81594777ca585bf002aa8854cacd"}, + {file = "coverage-7.8.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:771eb7587a0563ca5bb6f622b9ed7f9d07bd08900f7589b4febff05f469bea00"}, + {file = "coverage-7.8.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:42421e04069fb2cbcbca5a696c4050b84a43b05392679d4068acbe65449b5c64"}, + {file = "coverage-7.8.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:554fec1199d93ab30adaa751db68acec2b41c5602ac944bb19187cb9a41a8067"}, + {file = "coverage-7.8.0-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5aaeb00761f985007b38cf463b1d160a14a22c34eb3f6a39d9ad6fc27cb73008"}, + {file = "coverage-7.8.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:581a40c7b94921fffd6457ffe532259813fc68eb2bdda60fa8cc343414ce3733"}, + {file = "coverage-7.8.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:f319bae0321bc838e205bf9e5bc28f0a3165f30c203b610f17ab5552cff90323"}, + {file = "coverage-7.8.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:04bfec25a8ef1c5f41f5e7e5c842f6b615599ca8ba8391ec33a9290d9d2db3a3"}, + {file = "coverage-7.8.0-cp313-cp313-win32.whl", hash = "sha256:dd19608788b50eed889e13a5d71d832edc34fc9dfce606f66e8f9f917eef910d"}, + {file = "coverage-7.8.0-cp313-cp313-win_amd64.whl", hash = "sha256:a9abbccd778d98e9c7e85038e35e91e67f5b520776781d9a1e2ee9d400869487"}, + {file = "coverage-7.8.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:18c5ae6d061ad5b3e7eef4363fb27a0576012a7447af48be6c75b88494c6cf25"}, + {file = "coverage-7.8.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:95aa6ae391a22bbbce1b77ddac846c98c5473de0372ba5c463480043a07bff42"}, + {file = "coverage-7.8.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e013b07ba1c748dacc2a80e69a46286ff145935f260eb8c72df7185bf048f502"}, + {file = "coverage-7.8.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d766a4f0e5aa1ba056ec3496243150698dc0481902e2b8559314368717be82b1"}, + {file = "coverage-7.8.0-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ad80e6b4a0c3cb6f10f29ae4c60e991f424e6b14219d46f1e7d442b938ee68a4"}, + {file = "coverage-7.8.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:b87eb6fc9e1bb8f98892a2458781348fa37e6925f35bb6ceb9d4afd54ba36c73"}, + {file = "coverage-7.8.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:d1ba00ae33be84066cfbe7361d4e04dec78445b2b88bdb734d0d1cbab916025a"}, + {file = "coverage-7.8.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:f3c38e4e5ccbdc9198aecc766cedbb134b2d89bf64533973678dfcf07effd883"}, + {file = "coverage-7.8.0-cp313-cp313t-win32.whl", hash = "sha256:379fe315e206b14e21db5240f89dc0774bdd3e25c3c58c2c733c99eca96f1ada"}, + {file = "coverage-7.8.0-cp313-cp313t-win_amd64.whl", hash = "sha256:2e4b6b87bb0c846a9315e3ab4be2d52fac905100565f4b92f02c445c8799e257"}, + {file = "coverage-7.8.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:fa260de59dfb143af06dcf30c2be0b200bed2a73737a8a59248fcb9fa601ef0f"}, + {file = "coverage-7.8.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:96121edfa4c2dfdda409877ea8608dd01de816a4dc4a0523356067b305e4e17a"}, + {file = "coverage-7.8.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b8af63b9afa1031c0ef05b217faa598f3069148eeee6bb24b79da9012423b82"}, + {file = "coverage-7.8.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:89b1f4af0d4afe495cd4787a68e00f30f1d15939f550e869de90a86efa7e0814"}, + {file = 
"coverage-7.8.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94ec0be97723ae72d63d3aa41961a0b9a6f5a53ff599813c324548d18e3b9e8c"}, + {file = "coverage-7.8.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:8a1d96e780bdb2d0cbb297325711701f7c0b6f89199a57f2049e90064c29f6bd"}, + {file = "coverage-7.8.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:f1d8a2a57b47142b10374902777e798784abf400a004b14f1b0b9eaf1e528ba4"}, + {file = "coverage-7.8.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:cf60dd2696b457b710dd40bf17ad269d5f5457b96442f7f85722bdb16fa6c899"}, + {file = "coverage-7.8.0-cp39-cp39-win32.whl", hash = "sha256:be945402e03de47ba1872cd5236395e0f4ad635526185a930735f66710e1bd3f"}, + {file = "coverage-7.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:90e7fbc6216ecaffa5a880cdc9c77b7418c1dcb166166b78dbc630d07f278cc3"}, + {file = "coverage-7.8.0-pp39.pp310.pp311-none-any.whl", hash = "sha256:b8194fb8e50d556d5849753de991d390c5a1edeeba50f68e3a9253fbd8bf8ccd"}, + {file = "coverage-7.8.0-py3-none-any.whl", hash = "sha256:dbf364b4c5e7bae9250528167dfe40219b62e2d573c854d74be213e1e52069f7"}, + {file = "coverage-7.8.0.tar.gz", hash = "sha256:7a3d62b3b03b4b6fd41a085f3574874cf946cb4604d2b4d3e8dca8cd570ca501"}, ] [package.dependencies] tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.11.0a6\" and extra == \"toml\""} [package.extras] -toml = ["tomli"] +toml = ["tomli ; python_full_version <= \"3.11.0a6\""] [[package]] name = "distlib" @@ -246,31 +259,46 @@ testing = ["hatch", "pre-commit", "pytest", "tox"] [[package]] name = "filelock" -version = "3.17.0" +version = "3.18.0" description = "A platform independent file lock." optional = false python-versions = ">=3.9" groups = ["dev"] files = [ - {file = "filelock-3.17.0-py3-none-any.whl", hash = "sha256:533dc2f7ba78dc2f0f531fc6c4940addf7b70a481e269a5a3b93be94ffbe8338"}, - {file = "filelock-3.17.0.tar.gz", hash = "sha256:ee4e77401ef576ebb38cd7f13b9b28893194acc20a8e68e18730ba9c0e54660e"}, + {file = "filelock-3.18.0-py3-none-any.whl", hash = "sha256:c401f4f8377c4464e6db25fff06205fd89bdd83b65eb0488ed1b160f780e21de"}, + {file = "filelock-3.18.0.tar.gz", hash = "sha256:adbc88eabb99d2fec8c9c1b229b171f18afa655400173ddc653d5d01501fb9f2"}, ] [package.extras] docs = ["furo (>=2024.8.6)", "sphinx (>=8.1.3)", "sphinx-autodoc-typehints (>=3)"] testing = ["covdefaults (>=2.3)", "coverage (>=7.6.10)", "diff-cover (>=9.2.1)", "pytest (>=8.3.4)", "pytest-asyncio (>=0.25.2)", "pytest-cov (>=6)", "pytest-mock (>=3.14)", "pytest-timeout (>=2.3.1)", "virtualenv (>=20.28.1)"] -typing = ["typing-extensions (>=4.12.2)"] +typing = ["typing-extensions (>=4.12.2) ; python_version < \"3.11\""] + +[[package]] +name = "freezegun" +version = "1.5.1" +description = "Let your Python tests travel through time" +optional = false +python-versions = ">=3.7" +groups = ["dev"] +files = [ + {file = "freezegun-1.5.1-py3-none-any.whl", hash = "sha256:bf111d7138a8abe55ab48a71755673dbaa4ab87f4cff5634a4442dfec34c15f1"}, + {file = "freezegun-1.5.1.tar.gz", hash = "sha256:b29dedfcda6d5e8e083ce71b2b542753ad48cfec44037b3fc79702e2980a89e9"}, +] + +[package.dependencies] +python-dateutil = ">=2.7" [[package]] name = "identify" -version = "2.6.8" +version = "2.6.10" description = "File identification library for Python" optional = false python-versions = ">=3.9" groups = ["dev"] files = [ - {file = "identify-2.6.8-py2.py3-none-any.whl", hash = 
"sha256:83657f0f766a3c8d0eaea16d4ef42494b39b34629a4b3192a9d020d349b3e255"}, - {file = "identify-2.6.8.tar.gz", hash = "sha256:61491417ea2c0c5c670484fd8abbb34de34cdae1e5f39a73ee65e48e4bb663fc"}, + {file = "identify-2.6.10-py2.py3-none-any.whl", hash = "sha256:5f34248f54136beed1a7ba6a6b5c4b6cf21ff495aac7c359e1ef831ae3b8ab25"}, + {file = "identify-2.6.10.tar.gz", hash = "sha256:45e92fd704f3da71cc3880036633f48b4b7265fd4de2b57627cb157216eb7eb8"}, ] [package.extras] @@ -282,7 +310,7 @@ version = "3.10" description = "Internationalized Domain Names in Applications (IDNA)" optional = false python-versions = ">=3.6" -groups = ["main", "dev"] +groups = ["main"] files = [ {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, @@ -293,62 +321,65 @@ all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2 [[package]] name = "importlib-metadata" -version = "8.6.1" +version = "8.7.0" description = "Read metadata from Python packages" optional = false python-versions = ">=3.9" groups = ["main"] files = [ - {file = "importlib_metadata-8.6.1-py3-none-any.whl", hash = "sha256:02a89390c1e15fdfdc0d7c6b25cb3e62650d0494005c97d6f148bf5b9787525e"}, - {file = "importlib_metadata-8.6.1.tar.gz", hash = "sha256:310b41d755445d74569f993ccfc22838295d9fe005425094fad953d7f15c8580"}, + {file = "importlib_metadata-8.7.0-py3-none-any.whl", hash = "sha256:e5dd1551894c77868a30651cef00984d50e1002d06942a7101d34870c5f02afd"}, + {file = "importlib_metadata-8.7.0.tar.gz", hash = "sha256:d13b81ad223b890aa16c5471f2ac3056cf76c5f10f82d6f9292f0b415f389000"}, ] [package.dependencies] zipp = ">=3.20" [package.extras] -check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\""] cover = ["pytest-cov"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] enabler = ["pytest-enabler (>=2.2)"] perf = ["ipython"] -test = ["flufl.flake8", "importlib_resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-perf (>=0.9.2)"] +test = ["flufl.flake8", "importlib_resources (>=1.3) ; python_version < \"3.9\"", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-perf (>=0.9.2)"] type = ["pytest-mypy"] [[package]] name = "iniconfig" -version = "2.0.0" +version = "2.1.0" description = "brain-dead simple config-ini parsing" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" groups = ["dev"] files = [ - {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, - {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, + {file = "iniconfig-2.1.0-py3-none-any.whl", hash = "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760"}, + {file = "iniconfig-2.1.0.tar.gz", hash = "sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7"}, ] [[package]] name = "izulu" -version = "0.5.4" +version = "0.50.0" description = "The exceptional library" optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" groups = ["main"] files = [ - {file = "izulu-0.5.4-py3-none-any.whl", hash = 
"sha256:6431499a04f68daca0b852dfa5cfbcb7be804166bcdc4efd4dd4e6dd7a3e5898"}, - {file = "izulu-0.5.4.tar.gz", hash = "sha256:a6619402ab3c04ca32bbfb5000138287691e0b47d9794ba55a10af403ed23644"}, + {file = "izulu-0.50.0-py3-none-any.whl", hash = "sha256:4e9ae2508844e7c5f62c468a8b9e2deba2f60325ef63f01e65b39fd9a6b3fab4"}, + {file = "izulu-0.50.0.tar.gz", hash = "sha256:cc8e252d5e8560c70b95380295008eeb0786f7b745a405a40d3556ab3252d5f5"}, ] +[package.extras] +compatibility = ["typing-extensions (>=4.5.0)"] + [[package]] name = "mock" -version = "5.1.0" +version = "5.2.0" description = "Rolling backport of unittest.mock for all Pythons" optional = false python-versions = ">=3.6" groups = ["dev"] files = [ - {file = "mock-5.1.0-py3-none-any.whl", hash = "sha256:18c694e5ae8a208cdb3d2c20a993ca1a7b0efa258c247a1e565150f477f83744"}, - {file = "mock-5.1.0.tar.gz", hash = "sha256:5e96aad5ccda4718e0a229ed94b2024df75cc2d55575ba5762d31f5767b8767d"}, + {file = "mock-5.2.0-py3-none-any.whl", hash = "sha256:7ba87f72ca0e915175596069dbbcc7c75af7b5e9b9bc107ad6349ede0819982f"}, + {file = "mock-5.2.0.tar.gz", hash = "sha256:4e460e818629b4b173f32d08bf30d3af8123afbb8e04bb5707a1fd4799e503f0"}, ] [package.extras] @@ -412,14 +443,14 @@ reports = ["lxml"] [[package]] name = "mypy-extensions" -version = "1.0.0" +version = "1.1.0" description = "Type system extensions for programs checked with the mypy type checker." optional = false -python-versions = ">=3.5" +python-versions = ">=3.8" groups = ["dev"] files = [ - {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, - {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, + {file = "mypy_extensions-1.1.0-py3-none-any.whl", hash = "sha256:1be4cccdb0f2482337c4743e60421de3a356cd97508abadd57d47403e94f5505"}, + {file = "mypy_extensions-1.1.0.tar.gz", hash = "sha256:52e68efc3284861e772bbcd66823fde5ae21fd2fdb51c62a211403730b916558"}, ] [[package]] @@ -436,14 +467,14 @@ files = [ [[package]] name = "packaging" -version = "24.2" +version = "25.0" description = "Core utilities for Python packages" optional = false python-versions = ">=3.8" groups = ["main", "dev"] files = [ - {file = "packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759"}, - {file = "packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f"}, + {file = "packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484"}, + {file = "packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f"}, ] [[package]] @@ -460,20 +491,20 @@ files = [ [[package]] name = "platformdirs" -version = "4.3.6" +version = "4.3.7" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." 
optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" groups = ["dev"] files = [ - {file = "platformdirs-4.3.6-py3-none-any.whl", hash = "sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb"}, - {file = "platformdirs-4.3.6.tar.gz", hash = "sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907"}, + {file = "platformdirs-4.3.7-py3-none-any.whl", hash = "sha256:a03875334331946f13c549dbd8f4bac7a13a50a895a0eb1e8c6a8ace80d40a94"}, + {file = "platformdirs-4.3.7.tar.gz", hash = "sha256:eb437d586b6a0986388f0d6f74aa0cde27b48d0e3d66843640bfb6bdcdb6e351"}, ] [package.extras] -docs = ["furo (>=2024.8.6)", "proselint (>=0.14)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=8.3.2)", "pytest-cov (>=5)", "pytest-mock (>=3.14)"] -type = ["mypy (>=1.11.2)"] +docs = ["furo (>=2024.8.6)", "proselint (>=0.14)", "sphinx (>=8.1.3)", "sphinx-autodoc-typehints (>=3)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=8.3.4)", "pytest-cov (>=6)", "pytest-mock (>=3.14)"] +type = ["mypy (>=1.14.1)"] [[package]] name = "pluggy" @@ -493,14 +524,14 @@ testing = ["pytest", "pytest-benchmark"] [[package]] name = "pre-commit" -version = "4.1.0" +version = "4.2.0" description = "A framework for managing and maintaining multi-language pre-commit hooks." optional = false python-versions = ">=3.9" groups = ["dev"] files = [ - {file = "pre_commit-4.1.0-py2.py3-none-any.whl", hash = "sha256:d29e7cb346295bcc1cc75fc3e92e343495e3ea0196c9ec6ba53f49f10ab6ae7b"}, - {file = "pre_commit-4.1.0.tar.gz", hash = "sha256:ae3f018575a588e30dfddfab9a05448bfbd6b73d78709617b5a2b853549716d4"}, + {file = "pre_commit-4.2.0-py2.py3-none-any.whl", hash = "sha256:a009ca7205f1eb497d10b845e52c838a98b6cdd2102a6c8e4540e94ee75c58bd"}, + {file = "pre_commit-4.2.0.tar.gz", hash = "sha256:601283b9757afd87d40c4c4a9b2b5de9637a8ea02eaff7adc2d0fb4e04841146"}, ] [package.dependencies] @@ -548,133 +579,133 @@ files = [ [[package]] name = "pydantic" -version = "2.10.6" +version = "2.11.3" description = "Data validation using Python type hints" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" groups = ["main"] files = [ - {file = "pydantic-2.10.6-py3-none-any.whl", hash = "sha256:427d664bf0b8a2b34ff5dd0f5a18df00591adcee7198fbd71981054cef37b584"}, - {file = "pydantic-2.10.6.tar.gz", hash = "sha256:ca5daa827cce33de7a42be142548b0096bf05a7e7b365aebfa5f8eeec7128236"}, + {file = "pydantic-2.11.3-py3-none-any.whl", hash = "sha256:a082753436a07f9ba1289c6ffa01cd93db3548776088aa917cc43b63f68fa60f"}, + {file = "pydantic-2.11.3.tar.gz", hash = "sha256:7471657138c16adad9322fe3070c0116dd6c3ad8d649300e3cbdfe91f4db4ec3"}, ] [package.dependencies] annotated-types = ">=0.6.0" -pydantic-core = "2.27.2" +pydantic-core = "2.33.1" typing-extensions = ">=4.12.2" +typing-inspection = ">=0.4.0" [package.extras] email = ["email-validator (>=2.0.0)"] -timezone = ["tzdata"] +timezone = ["tzdata ; python_version >= \"3.9\" and platform_system == \"Windows\""] [[package]] name = "pydantic-core" -version = "2.27.2" +version = "2.33.1" description = "Core functionality for Pydantic validation and serialization" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" groups = ["main"] files = [ - {file = "pydantic_core-2.27.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2d367ca20b2f14095a8f4fa1210f5a7b78b8a20009ecced6b12818f455b1e9fa"}, - {file = "pydantic_core-2.27.2-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:491a2b73db93fab69731eaee494f320faa4e093dbed776be1a829c2eb222c34c"}, - {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7969e133a6f183be60e9f6f56bfae753585680f3b7307a8e555a948d443cc05a"}, - {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3de9961f2a346257caf0aa508a4da705467f53778e9ef6fe744c038119737ef5"}, - {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e2bb4d3e5873c37bb3dd58714d4cd0b0e6238cebc4177ac8fe878f8b3aa8e74c"}, - {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:280d219beebb0752699480fe8f1dc61ab6615c2046d76b7ab7ee38858de0a4e7"}, - {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47956ae78b6422cbd46f772f1746799cbb862de838fd8d1fbd34a82e05b0983a"}, - {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:14d4a5c49d2f009d62a2a7140d3064f686d17a5d1a268bc641954ba181880236"}, - {file = "pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:337b443af21d488716f8d0b6164de833e788aa6bd7e3a39c005febc1284f4962"}, - {file = "pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:03d0f86ea3184a12f41a2d23f7ccb79cdb5a18e06993f8a45baa8dfec746f0e9"}, - {file = "pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7041c36f5680c6e0f08d922aed302e98b3745d97fe1589db0a3eebf6624523af"}, - {file = "pydantic_core-2.27.2-cp310-cp310-win32.whl", hash = "sha256:50a68f3e3819077be2c98110c1f9dcb3817e93f267ba80a2c05bb4f8799e2ff4"}, - {file = "pydantic_core-2.27.2-cp310-cp310-win_amd64.whl", hash = "sha256:e0fd26b16394ead34a424eecf8a31a1f5137094cabe84a1bcb10fa6ba39d3d31"}, - {file = "pydantic_core-2.27.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:8e10c99ef58cfdf2a66fc15d66b16c4a04f62bca39db589ae8cba08bc55331bc"}, - {file = "pydantic_core-2.27.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:26f32e0adf166a84d0cb63be85c562ca8a6fa8de28e5f0d92250c6b7e9e2aff7"}, - {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c19d1ea0673cd13cc2f872f6c9ab42acc4e4f492a7ca9d3795ce2b112dd7e15"}, - {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5e68c4446fe0810e959cdff46ab0a41ce2f2c86d227d96dc3847af0ba7def306"}, - {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d9640b0059ff4f14d1f37321b94061c6db164fbe49b334b31643e0528d100d99"}, - {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:40d02e7d45c9f8af700f3452f329ead92da4c5f4317ca9b896de7ce7199ea459"}, - {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1c1fd185014191700554795c99b347d64f2bb637966c4cfc16998a0ca700d048"}, - {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d81d2068e1c1228a565af076598f9e7451712700b673de8f502f0334f281387d"}, - {file = "pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:1a4207639fb02ec2dbb76227d7c751a20b1a6b4bc52850568e52260cae64ca3b"}, - {file = "pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:3de3ce3c9ddc8bbd88f6e0e304dea0e66d843ec9de1b0042b0911c1663ffd474"}, - {file = 
"pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:30c5f68ded0c36466acede341551106821043e9afaad516adfb6e8fa80a4e6a6"}, - {file = "pydantic_core-2.27.2-cp311-cp311-win32.whl", hash = "sha256:c70c26d2c99f78b125a3459f8afe1aed4d9687c24fd677c6a4436bc042e50d6c"}, - {file = "pydantic_core-2.27.2-cp311-cp311-win_amd64.whl", hash = "sha256:08e125dbdc505fa69ca7d9c499639ab6407cfa909214d500897d02afb816e7cc"}, - {file = "pydantic_core-2.27.2-cp311-cp311-win_arm64.whl", hash = "sha256:26f0d68d4b235a2bae0c3fc585c585b4ecc51382db0e3ba402a22cbc440915e4"}, - {file = "pydantic_core-2.27.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:9e0c8cfefa0ef83b4da9588448b6d8d2a2bf1a53c3f1ae5fca39eb3061e2f0b0"}, - {file = "pydantic_core-2.27.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:83097677b8e3bd7eaa6775720ec8e0405f1575015a463285a92bfdfe254529ef"}, - {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:172fce187655fece0c90d90a678424b013f8fbb0ca8b036ac266749c09438cb7"}, - {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:519f29f5213271eeeeb3093f662ba2fd512b91c5f188f3bb7b27bc5973816934"}, - {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:05e3a55d124407fffba0dd6b0c0cd056d10e983ceb4e5dbd10dda135c31071d6"}, - {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9c3ed807c7b91de05e63930188f19e921d1fe90de6b4f5cd43ee7fcc3525cb8c"}, - {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6fb4aadc0b9a0c063206846d603b92030eb6f03069151a625667f982887153e2"}, - {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:28ccb213807e037460326424ceb8b5245acb88f32f3d2777427476e1b32c48c4"}, - {file = "pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:de3cd1899e2c279b140adde9357c4495ed9d47131b4a4eaff9052f23398076b3"}, - {file = "pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:220f892729375e2d736b97d0e51466252ad84c51857d4d15f5e9692f9ef12be4"}, - {file = "pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a0fcd29cd6b4e74fe8ddd2c90330fd8edf2e30cb52acda47f06dd615ae72da57"}, - {file = "pydantic_core-2.27.2-cp312-cp312-win32.whl", hash = "sha256:1e2cb691ed9834cd6a8be61228471d0a503731abfb42f82458ff27be7b2186fc"}, - {file = "pydantic_core-2.27.2-cp312-cp312-win_amd64.whl", hash = "sha256:cc3f1a99a4f4f9dd1de4fe0312c114e740b5ddead65bb4102884b384c15d8bc9"}, - {file = "pydantic_core-2.27.2-cp312-cp312-win_arm64.whl", hash = "sha256:3911ac9284cd8a1792d3cb26a2da18f3ca26c6908cc434a18f730dc0db7bfa3b"}, - {file = "pydantic_core-2.27.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:7d14bd329640e63852364c306f4d23eb744e0f8193148d4044dd3dacdaacbd8b"}, - {file = "pydantic_core-2.27.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:82f91663004eb8ed30ff478d77c4d1179b3563df6cdb15c0817cd1cdaf34d154"}, - {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:71b24c7d61131bb83df10cc7e687433609963a944ccf45190cfc21e0887b08c9"}, - {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fa8e459d4954f608fa26116118bb67f56b93b209c39b008277ace29937453dc9"}, - {file = 
"pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce8918cbebc8da707ba805b7fd0b382816858728ae7fe19a942080c24e5b7cd1"}, - {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eda3f5c2a021bbc5d976107bb302e0131351c2ba54343f8a496dc8783d3d3a6a"}, - {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bd8086fa684c4775c27f03f062cbb9eaa6e17f064307e86b21b9e0abc9c0f02e"}, - {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8d9b3388db186ba0c099a6d20f0604a44eabdeef1777ddd94786cdae158729e4"}, - {file = "pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:7a66efda2387de898c8f38c0cf7f14fca0b51a8ef0b24bfea5849f1b3c95af27"}, - {file = "pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:18a101c168e4e092ab40dbc2503bdc0f62010e95d292b27827871dc85450d7ee"}, - {file = "pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ba5dd002f88b78a4215ed2f8ddbdf85e8513382820ba15ad5ad8955ce0ca19a1"}, - {file = "pydantic_core-2.27.2-cp313-cp313-win32.whl", hash = "sha256:1ebaf1d0481914d004a573394f4be3a7616334be70261007e47c2a6fe7e50130"}, - {file = "pydantic_core-2.27.2-cp313-cp313-win_amd64.whl", hash = "sha256:953101387ecf2f5652883208769a79e48db18c6df442568a0b5ccd8c2723abee"}, - {file = "pydantic_core-2.27.2-cp313-cp313-win_arm64.whl", hash = "sha256:ac4dbfd1691affb8f48c2c13241a2e3b60ff23247cbcf981759c768b6633cf8b"}, - {file = "pydantic_core-2.27.2-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:d3e8d504bdd3f10835468f29008d72fc8359d95c9c415ce6e767203db6127506"}, - {file = "pydantic_core-2.27.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:521eb9b7f036c9b6187f0b47318ab0d7ca14bd87f776240b90b21c1f4f149320"}, - {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:85210c4d99a0114f5a9481b44560d7d1e35e32cc5634c656bc48e590b669b145"}, - {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d716e2e30c6f140d7560ef1538953a5cd1a87264c737643d481f2779fc247fe1"}, - {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f66d89ba397d92f840f8654756196d93804278457b5fbede59598a1f9f90b228"}, - {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:669e193c1c576a58f132e3158f9dfa9662969edb1a250c54d8fa52590045f046"}, - {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdbe7629b996647b99c01b37f11170a57ae675375b14b8c13b8518b8320ced5"}, - {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d262606bf386a5ba0b0af3b97f37c83d7011439e3dc1a9298f21efb292e42f1a"}, - {file = "pydantic_core-2.27.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:cabb9bcb7e0d97f74df8646f34fc76fbf793b7f6dc2438517d7a9e50eee4f14d"}, - {file = "pydantic_core-2.27.2-cp38-cp38-musllinux_1_1_armv7l.whl", hash = "sha256:d2d63f1215638d28221f664596b1ccb3944f6e25dd18cd3b86b0a4c408d5ebb9"}, - {file = "pydantic_core-2.27.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:bca101c00bff0adb45a833f8451b9105d9df18accb8743b08107d7ada14bd7da"}, - {file = "pydantic_core-2.27.2-cp38-cp38-win32.whl", hash = "sha256:f6f8e111843bbb0dee4cb6594cdc73e79b3329b526037ec242a3e49012495b3b"}, - {file = 
"pydantic_core-2.27.2-cp38-cp38-win_amd64.whl", hash = "sha256:fd1aea04935a508f62e0d0ef1f5ae968774a32afc306fb8545e06f5ff5cdf3ad"}, - {file = "pydantic_core-2.27.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:c10eb4f1659290b523af58fa7cffb452a61ad6ae5613404519aee4bfbf1df993"}, - {file = "pydantic_core-2.27.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ef592d4bad47296fb11f96cd7dc898b92e795032b4894dfb4076cfccd43a9308"}, - {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c61709a844acc6bf0b7dce7daae75195a10aac96a596ea1b776996414791ede4"}, - {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:42c5f762659e47fdb7b16956c71598292f60a03aa92f8b6351504359dbdba6cf"}, - {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4c9775e339e42e79ec99c441d9730fccf07414af63eac2f0e48e08fd38a64d76"}, - {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:57762139821c31847cfb2df63c12f725788bd9f04bc2fb392790959b8f70f118"}, - {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0d1e85068e818c73e048fe28cfc769040bb1f475524f4745a5dc621f75ac7630"}, - {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:097830ed52fd9e427942ff3b9bc17fab52913b2f50f2880dc4a5611446606a54"}, - {file = "pydantic_core-2.27.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:044a50963a614ecfae59bb1eaf7ea7efc4bc62f49ed594e18fa1e5d953c40e9f"}, - {file = "pydantic_core-2.27.2-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:4e0b4220ba5b40d727c7f879eac379b822eee5d8fff418e9d3381ee45b3b0362"}, - {file = "pydantic_core-2.27.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5e4f4bb20d75e9325cc9696c6802657b58bc1dbbe3022f32cc2b2b632c3fbb96"}, - {file = "pydantic_core-2.27.2-cp39-cp39-win32.whl", hash = "sha256:cca63613e90d001b9f2f9a9ceb276c308bfa2a43fafb75c8031c4f66039e8c6e"}, - {file = "pydantic_core-2.27.2-cp39-cp39-win_amd64.whl", hash = "sha256:77d1bca19b0f7021b3a982e6f903dcd5b2b06076def36a652e3907f596e29f67"}, - {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:2bf14caea37e91198329b828eae1618c068dfb8ef17bb33287a7ad4b61ac314e"}, - {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:b0cb791f5b45307caae8810c2023a184c74605ec3bcbb67d13846c28ff731ff8"}, - {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:688d3fd9fcb71f41c4c015c023d12a79d1c4c0732ec9eb35d96e3388a120dcf3"}, - {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d591580c34f4d731592f0e9fe40f9cc1b430d297eecc70b962e93c5c668f15f"}, - {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:82f986faf4e644ffc189a7f1aafc86e46ef70372bb153e7001e8afccc6e54133"}, - {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:bec317a27290e2537f922639cafd54990551725fc844249e64c523301d0822fc"}, - {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:0296abcb83a797db256b773f45773da397da75a08f5fcaef41f2044adec05f50"}, - {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:0d75070718e369e452075a6017fbf187f788e17ed67a3abd47fa934d001863d9"}, - 
{file = "pydantic_core-2.27.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:7e17b560be3c98a8e3aa66ce828bdebb9e9ac6ad5466fba92eb74c4c95cb1151"}, - {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:c33939a82924da9ed65dab5a65d427205a73181d8098e79b6b426bdf8ad4e656"}, - {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:00bad2484fa6bda1e216e7345a798bd37c68fb2d97558edd584942aa41b7d278"}, - {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c817e2b40aba42bac6f457498dacabc568c3b7a986fc9ba7c8d9d260b71485fb"}, - {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:251136cdad0cb722e93732cb45ca5299fb56e1344a833640bf93b2803f8d1bfd"}, - {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d2088237af596f0a524d3afc39ab3b036e8adb054ee57cbb1dcf8e09da5b29cc"}, - {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:d4041c0b966a84b4ae7a09832eb691a35aec90910cd2dbe7a208de59be77965b"}, - {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:8083d4e875ebe0b864ffef72a4304827015cff328a1be6e22cc850753bfb122b"}, - {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f141ee28a0ad2123b6611b6ceff018039df17f32ada8b534e6aa039545a3efb2"}, - {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7d0c8399fcc1848491f00e0314bd59fb34a9c008761bcb422a057670c3f65e35"}, - {file = "pydantic_core-2.27.2.tar.gz", hash = "sha256:eb026e5a4c1fee05726072337ff51d1efb6f59090b7da90d30ea58625b1ffb39"}, + {file = "pydantic_core-2.33.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:3077cfdb6125cc8dab61b155fdd714663e401f0e6883f9632118ec12cf42df26"}, + {file = "pydantic_core-2.33.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8ffab8b2908d152e74862d276cf5017c81a2f3719f14e8e3e8d6b83fda863927"}, + {file = "pydantic_core-2.33.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5183e4f6a2d468787243ebcd70cf4098c247e60d73fb7d68d5bc1e1beaa0c4db"}, + {file = "pydantic_core-2.33.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:398a38d323f37714023be1e0285765f0a27243a8b1506b7b7de87b647b517e48"}, + {file = "pydantic_core-2.33.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:87d3776f0001b43acebfa86f8c64019c043b55cc5a6a2e313d728b5c95b46969"}, + {file = "pydantic_core-2.33.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c566dd9c5f63d22226409553531f89de0cac55397f2ab8d97d6f06cfce6d947e"}, + {file = "pydantic_core-2.33.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a0d5f3acc81452c56895e90643a625302bd6be351e7010664151cc55b7b97f89"}, + {file = "pydantic_core-2.33.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d3a07fadec2a13274a8d861d3d37c61e97a816beae717efccaa4b36dfcaadcde"}, + {file = "pydantic_core-2.33.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:f99aeda58dce827f76963ee87a0ebe75e648c72ff9ba1174a253f6744f518f65"}, + {file = "pydantic_core-2.33.1-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:902dbc832141aa0ec374f4310f1e4e7febeebc3256f00dc359a9ac3f264a45dc"}, + {file = "pydantic_core-2.33.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = 
"sha256:fe44d56aa0b00d66640aa84a3cbe80b7a3ccdc6f0b1ca71090696a6d4777c091"}, + {file = "pydantic_core-2.33.1-cp310-cp310-win32.whl", hash = "sha256:ed3eb16d51257c763539bde21e011092f127a2202692afaeaccb50db55a31383"}, + {file = "pydantic_core-2.33.1-cp310-cp310-win_amd64.whl", hash = "sha256:694ad99a7f6718c1a498dc170ca430687a39894a60327f548e02a9c7ee4b6504"}, + {file = "pydantic_core-2.33.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:6e966fc3caaf9f1d96b349b0341c70c8d6573bf1bac7261f7b0ba88f96c56c24"}, + {file = "pydantic_core-2.33.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bfd0adeee563d59c598ceabddf2c92eec77abcb3f4a391b19aa7366170bd9e30"}, + {file = "pydantic_core-2.33.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:91815221101ad3c6b507804178a7bb5cb7b2ead9ecd600041669c8d805ebd595"}, + {file = "pydantic_core-2.33.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9fea9c1869bb4742d174a57b4700c6dadea951df8b06de40c2fedb4f02931c2e"}, + {file = "pydantic_core-2.33.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d20eb4861329bb2484c021b9d9a977566ab16d84000a57e28061151c62b349a"}, + {file = "pydantic_core-2.33.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fb935c5591573ae3201640579f30128ccc10739b45663f93c06796854405505"}, + {file = "pydantic_core-2.33.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c964fd24e6166420d18fb53996d8c9fd6eac9bf5ae3ec3d03015be4414ce497f"}, + {file = "pydantic_core-2.33.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:681d65e9011f7392db5aa002b7423cc442d6a673c635668c227c6c8d0e5a4f77"}, + {file = "pydantic_core-2.33.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e100c52f7355a48413e2999bfb4e139d2977a904495441b374f3d4fb4a170961"}, + {file = "pydantic_core-2.33.1-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:048831bd363490be79acdd3232f74a0e9951b11b2b4cc058aeb72b22fdc3abe1"}, + {file = "pydantic_core-2.33.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:bdc84017d28459c00db6f918a7272a5190bec3090058334e43a76afb279eac7c"}, + {file = "pydantic_core-2.33.1-cp311-cp311-win32.whl", hash = "sha256:32cd11c5914d1179df70406427097c7dcde19fddf1418c787540f4b730289896"}, + {file = "pydantic_core-2.33.1-cp311-cp311-win_amd64.whl", hash = "sha256:2ea62419ba8c397e7da28a9170a16219d310d2cf4970dbc65c32faf20d828c83"}, + {file = "pydantic_core-2.33.1-cp311-cp311-win_arm64.whl", hash = "sha256:fc903512177361e868bc1f5b80ac8c8a6e05fcdd574a5fb5ffeac5a9982b9e89"}, + {file = "pydantic_core-2.33.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:1293d7febb995e9d3ec3ea09caf1a26214eec45b0f29f6074abb004723fc1de8"}, + {file = "pydantic_core-2.33.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:99b56acd433386c8f20be5c4000786d1e7ca0523c8eefc995d14d79c7a081498"}, + {file = "pydantic_core-2.33.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:35a5ec3fa8c2fe6c53e1b2ccc2454398f95d5393ab398478f53e1afbbeb4d939"}, + {file = "pydantic_core-2.33.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b172f7b9d2f3abc0efd12e3386f7e48b576ef309544ac3a63e5e9cdd2e24585d"}, + {file = "pydantic_core-2.33.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9097b9f17f91eea659b9ec58148c0747ec354a42f7389b9d50701610d86f812e"}, + {file = "pydantic_core-2.33.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:cc77ec5b7e2118b152b0d886c7514a4653bcb58c6b1d760134a9fab915f777b3"}, + {file = "pydantic_core-2.33.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5e3d15245b08fa4a84cefc6c9222e6f37c98111c8679fbd94aa145f9a0ae23d"}, + {file = "pydantic_core-2.33.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ef99779001d7ac2e2461d8ab55d3373fe7315caefdbecd8ced75304ae5a6fc6b"}, + {file = "pydantic_core-2.33.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:fc6bf8869e193855e8d91d91f6bf59699a5cdfaa47a404e278e776dd7f168b39"}, + {file = "pydantic_core-2.33.1-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:b1caa0bc2741b043db7823843e1bde8aaa58a55a58fda06083b0569f8b45693a"}, + {file = "pydantic_core-2.33.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ec259f62538e8bf364903a7d0d0239447059f9434b284f5536e8402b7dd198db"}, + {file = "pydantic_core-2.33.1-cp312-cp312-win32.whl", hash = "sha256:e14f369c98a7c15772b9da98987f58e2b509a93235582838bd0d1d8c08b68fda"}, + {file = "pydantic_core-2.33.1-cp312-cp312-win_amd64.whl", hash = "sha256:1c607801d85e2e123357b3893f82c97a42856192997b95b4d8325deb1cd0c5f4"}, + {file = "pydantic_core-2.33.1-cp312-cp312-win_arm64.whl", hash = "sha256:8d13f0276806ee722e70a1c93da19748594f19ac4299c7e41237fc791d1861ea"}, + {file = "pydantic_core-2.33.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:70af6a21237b53d1fe7b9325b20e65cbf2f0a848cf77bed492b029139701e66a"}, + {file = "pydantic_core-2.33.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:282b3fe1bbbe5ae35224a0dbd05aed9ccabccd241e8e6b60370484234b456266"}, + {file = "pydantic_core-2.33.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4b315e596282bbb5822d0c7ee9d255595bd7506d1cb20c2911a4da0b970187d3"}, + {file = "pydantic_core-2.33.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1dfae24cf9921875ca0ca6a8ecb4bb2f13c855794ed0d468d6abbec6e6dcd44a"}, + {file = "pydantic_core-2.33.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6dd8ecfde08d8bfadaea669e83c63939af76f4cf5538a72597016edfa3fad516"}, + {file = "pydantic_core-2.33.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2f593494876eae852dc98c43c6f260f45abdbfeec9e4324e31a481d948214764"}, + {file = "pydantic_core-2.33.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:948b73114f47fd7016088e5186d13faf5e1b2fe83f5e320e371f035557fd264d"}, + {file = "pydantic_core-2.33.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e11f3864eb516af21b01e25fac915a82e9ddad3bb0fb9e95a246067398b435a4"}, + {file = "pydantic_core-2.33.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:549150be302428b56fdad0c23c2741dcdb5572413776826c965619a25d9c6bde"}, + {file = "pydantic_core-2.33.1-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:495bc156026efafd9ef2d82372bd38afce78ddd82bf28ef5276c469e57c0c83e"}, + {file = "pydantic_core-2.33.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ec79de2a8680b1a67a07490bddf9636d5c2fab609ba8c57597e855fa5fa4dacd"}, + {file = "pydantic_core-2.33.1-cp313-cp313-win32.whl", hash = "sha256:ee12a7be1742f81b8a65b36c6921022301d466b82d80315d215c4c691724986f"}, + {file = "pydantic_core-2.33.1-cp313-cp313-win_amd64.whl", hash = "sha256:ede9b407e39949d2afc46385ce6bd6e11588660c26f80576c11c958e6647bc40"}, + {file = "pydantic_core-2.33.1-cp313-cp313-win_arm64.whl", hash = "sha256:aa687a23d4b7871a00e03ca96a09cad0f28f443690d300500603bd0adba4b523"}, + 
{file = "pydantic_core-2.33.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:401d7b76e1000d0dd5538e6381d28febdcacb097c8d340dde7d7fc6e13e9f95d"}, + {file = "pydantic_core-2.33.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7aeb055a42d734c0255c9e489ac67e75397d59c6fbe60d155851e9782f276a9c"}, + {file = "pydantic_core-2.33.1-cp313-cp313t-win_amd64.whl", hash = "sha256:338ea9b73e6e109f15ab439e62cb3b78aa752c7fd9536794112e14bee02c8d18"}, + {file = "pydantic_core-2.33.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:5ab77f45d33d264de66e1884fca158bc920cb5e27fd0764a72f72f5756ae8bdb"}, + {file = "pydantic_core-2.33.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e7aaba1b4b03aaea7bb59e1b5856d734be011d3e6d98f5bcaa98cb30f375f2ad"}, + {file = "pydantic_core-2.33.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7fb66263e9ba8fea2aa85e1e5578980d127fb37d7f2e292773e7bc3a38fb0c7b"}, + {file = "pydantic_core-2.33.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3f2648b9262607a7fb41d782cc263b48032ff7a03a835581abbf7a3bec62bcf5"}, + {file = "pydantic_core-2.33.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:723c5630c4259400818b4ad096735a829074601805d07f8cafc366d95786d331"}, + {file = "pydantic_core-2.33.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d100e3ae783d2167782391e0c1c7a20a31f55f8015f3293647544df3f9c67824"}, + {file = "pydantic_core-2.33.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:177d50460bc976a0369920b6c744d927b0ecb8606fb56858ff542560251b19e5"}, + {file = "pydantic_core-2.33.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a3edde68d1a1f9af1273b2fe798997b33f90308fb6d44d8550c89fc6a3647cf6"}, + {file = "pydantic_core-2.33.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:a62c3c3ef6a7e2c45f7853b10b5bc4ddefd6ee3cd31024754a1a5842da7d598d"}, + {file = "pydantic_core-2.33.1-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:c91dbb0ab683fa0cd64a6e81907c8ff41d6497c346890e26b23de7ee55353f96"}, + {file = "pydantic_core-2.33.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9f466e8bf0a62dc43e068c12166281c2eca72121dd2adc1040f3aa1e21ef8599"}, + {file = "pydantic_core-2.33.1-cp39-cp39-win32.whl", hash = "sha256:ab0277cedb698749caada82e5d099dc9fed3f906a30d4c382d1a21725777a1e5"}, + {file = "pydantic_core-2.33.1-cp39-cp39-win_amd64.whl", hash = "sha256:5773da0ee2d17136b1f1c6fbde543398d452a6ad2a7b54ea1033e2daa739b8d2"}, + {file = "pydantic_core-2.33.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5c834f54f8f4640fd7e4b193f80eb25a0602bba9e19b3cd2fc7ffe8199f5ae02"}, + {file = "pydantic_core-2.33.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:049e0de24cf23766f12cc5cc71d8abc07d4a9deb9061b334b62093dedc7cb068"}, + {file = "pydantic_core-2.33.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a28239037b3d6f16916a4c831a5a0eadf856bdd6d2e92c10a0da3a59eadcf3e"}, + {file = "pydantic_core-2.33.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d3da303ab5f378a268fa7d45f37d7d85c3ec19769f28d2cc0c61826a8de21fe"}, + {file = "pydantic_core-2.33.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:25626fb37b3c543818c14821afe0fd3830bc327a43953bc88db924b68c5723f1"}, + {file = "pydantic_core-2.33.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = 
"sha256:3ab2d36e20fbfcce8f02d73c33a8a7362980cff717926bbae030b93ae46b56c7"}, + {file = "pydantic_core-2.33.1-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:2f9284e11c751b003fd4215ad92d325d92c9cb19ee6729ebd87e3250072cdcde"}, + {file = "pydantic_core-2.33.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:048c01eee07d37cbd066fc512b9d8b5ea88ceeb4e629ab94b3e56965ad655add"}, + {file = "pydantic_core-2.33.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:5ccd429694cf26af7997595d627dd2637e7932214486f55b8a357edaac9dae8c"}, + {file = "pydantic_core-2.33.1-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:3a371dc00282c4b84246509a5ddc808e61b9864aa1eae9ecc92bb1268b82db4a"}, + {file = "pydantic_core-2.33.1-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:f59295ecc75a1788af8ba92f2e8c6eeaa5a94c22fc4d151e8d9638814f85c8fc"}, + {file = "pydantic_core-2.33.1-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:08530b8ac922003033f399128505f513e30ca770527cc8bbacf75a84fcc2c74b"}, + {file = "pydantic_core-2.33.1-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bae370459da6a5466978c0eacf90690cb57ec9d533f8e63e564ef3822bfa04fe"}, + {file = "pydantic_core-2.33.1-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e3de2777e3b9f4d603112f78006f4ae0acb936e95f06da6cb1a45fbad6bdb4b5"}, + {file = "pydantic_core-2.33.1-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:3a64e81e8cba118e108d7126362ea30e021291b7805d47e4896e52c791be2761"}, + {file = "pydantic_core-2.33.1-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:52928d8c1b6bda03cc6d811e8923dffc87a2d3c8b3bfd2ce16471c7147a24850"}, + {file = "pydantic_core-2.33.1-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:1b30d92c9412beb5ac6b10a3eb7ef92ccb14e3f2a8d7732e2d739f58b3aa7544"}, + {file = "pydantic_core-2.33.1-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:f995719707e0e29f0f41a8aa3bcea6e761a36c9136104d3189eafb83f5cec5e5"}, + {file = "pydantic_core-2.33.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:7edbc454a29fc6aeae1e1eecba4f07b63b8d76e76a748532233c4c167b4cb9ea"}, + {file = "pydantic_core-2.33.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:ad05b683963f69a1d5d2c2bdab1274a31221ca737dbbceaa32bcb67359453cdd"}, + {file = "pydantic_core-2.33.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:df6a94bf9452c6da9b5d76ed229a5683d0306ccb91cca8e1eea883189780d568"}, + {file = "pydantic_core-2.33.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7965c13b3967909a09ecc91f21d09cfc4576bf78140b988904e94f130f188396"}, + {file = "pydantic_core-2.33.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3f1fdb790440a34f6ecf7679e1863b825cb5ffde858a9197f851168ed08371e5"}, + {file = "pydantic_core-2.33.1-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:5277aec8d879f8d05168fdd17ae811dd313b8ff894aeeaf7cd34ad28b4d77e33"}, + {file = "pydantic_core-2.33.1-pp39-pypy39_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:8ab581d3530611897d863d1a649fb0644b860286b4718db919bfd51ece41f10b"}, + {file = "pydantic_core-2.33.1-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:0483847fa9ad5e3412265c1bd72aad35235512d9ce9d27d81a56d935ef489672"}, + {file = "pydantic_core-2.33.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:de9e06abe3cc5ec6a2d5f75bc99b0bdca4f5c719a5b34026f8c57efbdecd2ee3"}, + {file = 
"pydantic_core-2.33.1.tar.gz", hash = "sha256:bcc9c6fdb0ced789245b02b7d6603e17d1563064ddcfc36f046b61c0c05dd9df"}, ] [package.dependencies] @@ -682,14 +713,14 @@ typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" [[package]] name = "pytest" -version = "8.3.4" +version = "8.3.5" description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.8" groups = ["dev"] files = [ - {file = "pytest-8.3.4-py3-none-any.whl", hash = "sha256:50e16d954148559c9a74109af1eaf0c945ba2d8f30f0a3d3335edde19788b6f6"}, - {file = "pytest-8.3.4.tar.gz", hash = "sha256:965370d062bce11e73868e0335abac31b4d3de0e82f4007408d242b4f8610761"}, + {file = "pytest-8.3.5-py3-none-any.whl", hash = "sha256:c69214aa47deac29fad6c2a4f590b9c4a9fdb16a403176fe154b79c0b4d4d820"}, + {file = "pytest-8.3.5.tar.gz", hash = "sha256:f4efe70cc14e511565ac476b57c279e12a855b11f48f212af1080ef2263d3845"}, ] [package.dependencies] @@ -703,16 +734,36 @@ tomli = {version = ">=1", markers = "python_version < \"3.11\""} [package.extras] dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] +[[package]] +name = "pytest-asyncio" +version = "0.26.0" +description = "Pytest support for asyncio" +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "pytest_asyncio-0.26.0-py3-none-any.whl", hash = "sha256:7b51ed894f4fbea1340262bdae5135797ebbe21d8638978e35d31c6d19f72fb0"}, + {file = "pytest_asyncio-0.26.0.tar.gz", hash = "sha256:c4df2a697648241ff39e7f0e4a73050b03f123f760673956cf0d72a4990e312f"}, +] + +[package.dependencies] +pytest = ">=8.2,<9" +typing-extensions = {version = ">=4.12", markers = "python_version < \"3.10\""} + +[package.extras] +docs = ["sphinx (>=5.3)", "sphinx-rtd-theme (>=1)"] +testing = ["coverage (>=6.2)", "hypothesis (>=5.7.1)"] + [[package]] name = "pytest-cov" -version = "6.0.0" +version = "6.1.1" description = "Pytest plugin for measuring coverage." 
optional = false python-versions = ">=3.9" groups = ["dev"] files = [ - {file = "pytest-cov-6.0.0.tar.gz", hash = "sha256:fde0b595ca248bb8e2d76f020b465f3b107c9632e6a1d1705f17834c89dcadc0"}, - {file = "pytest_cov-6.0.0-py3-none-any.whl", hash = "sha256:eee6f1b9e61008bd34975a4d5bab25801eb31898b032dd55addc93e96fcaaa35"}, + {file = "pytest_cov-6.1.1-py3-none-any.whl", hash = "sha256:bddf29ed2d0ab6f4df17b4c55b0a657287db8684af9c42ea546b21b1041b3dde"}, + {file = "pytest_cov-6.1.1.tar.gz", hash = "sha256:46935f7aaefba760e716c2ebfbe1c216240b9592966e7da99ea8292d4d3e2a0a"}, ] [package.dependencies] @@ -744,16 +795,31 @@ psutil = ["psutil (>=3.0)"] setproctitle = ["setproctitle"] testing = ["filelock"] +[[package]] +name = "python-dateutil" +version = "2.9.0.post0" +description = "Extensions to the standard Python datetime module" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +groups = ["dev"] +files = [ + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, +] + +[package.dependencies] +six = ">=1.5" + [[package]] name = "pytz" -version = "2025.1" +version = "2025.2" description = "World timezone definitions, modern and historical" optional = false python-versions = "*" groups = ["main"] files = [ - {file = "pytz-2025.1-py2.py3-none-any.whl", hash = "sha256:89dd22dca55b46eac6eda23b2d72721bf1bdfef212645d81513ef5d03038de57"}, - {file = "pytz-2025.1.tar.gz", hash = "sha256:c2db42be2a2518b28e65f9207c4d05e6ff547d1efa4086469ef855e4ab70178e"}, + {file = "pytz-2025.2-py2.py3-none-any.whl", hash = "sha256:5ddf76296dd8c44c26eb8f4b6f35488f3ccbf6fbbd7adee0b7262d43f0ec2f00"}, + {file = "pytz-2025.2.tar.gz", hash = "sha256:360b9e3dbb49a209c21ad61809c7fb453643e048b38924c765813546746e81c3"}, ] [[package]] @@ -819,13 +885,53 @@ files = [ {file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"}, ] +[[package]] +name = "ruff" +version = "0.11.7" +description = "An extremely fast Python linter and code formatter, written in Rust." 
+optional = false +python-versions = ">=3.7" +groups = ["dev"] +files = [ + {file = "ruff-0.11.7-py3-none-linux_armv6l.whl", hash = "sha256:d29e909d9a8d02f928d72ab7837b5cbc450a5bdf578ab9ebee3263d0a525091c"}, + {file = "ruff-0.11.7-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:dd1fb86b168ae349fb01dd497d83537b2c5541fe0626e70c786427dd8363aaee"}, + {file = "ruff-0.11.7-py3-none-macosx_11_0_arm64.whl", hash = "sha256:d3d7d2e140a6fbbc09033bce65bd7ea29d6a0adeb90b8430262fbacd58c38ada"}, + {file = "ruff-0.11.7-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4809df77de390a1c2077d9b7945d82f44b95d19ceccf0c287c56e4dc9b91ca64"}, + {file = "ruff-0.11.7-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f3a0c2e169e6b545f8e2dba185eabbd9db4f08880032e75aa0e285a6d3f48201"}, + {file = "ruff-0.11.7-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:49b888200a320dd96a68e86736cf531d6afba03e4f6cf098401406a257fcf3d6"}, + {file = "ruff-0.11.7-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:2b19cdb9cf7dae00d5ee2e7c013540cdc3b31c4f281f1dacb5a799d610e90db4"}, + {file = "ruff-0.11.7-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:64e0ee994c9e326b43539d133a36a455dbaab477bc84fe7bfbd528abe2f05c1e"}, + {file = "ruff-0.11.7-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bad82052311479a5865f52c76ecee5d468a58ba44fb23ee15079f17dd4c8fd63"}, + {file = "ruff-0.11.7-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7940665e74e7b65d427b82bffc1e46710ec7f30d58b4b2d5016e3f0321436502"}, + {file = "ruff-0.11.7-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:169027e31c52c0e36c44ae9a9c7db35e505fee0b39f8d9fca7274a6305295a92"}, + {file = "ruff-0.11.7-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:305b93f9798aee582e91e34437810439acb28b5fc1fee6b8205c78c806845a94"}, + {file = "ruff-0.11.7-py3-none-musllinux_1_2_i686.whl", hash = "sha256:a681db041ef55550c371f9cd52a3cf17a0da4c75d6bd691092dfc38170ebc4b6"}, + {file = "ruff-0.11.7-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:07f1496ad00a4a139f4de220b0c97da6d4c85e0e4aa9b2624167b7d4d44fd6b6"}, + {file = "ruff-0.11.7-py3-none-win32.whl", hash = "sha256:f25dfb853ad217e6e5f1924ae8a5b3f6709051a13e9dad18690de6c8ff299e26"}, + {file = "ruff-0.11.7-py3-none-win_amd64.whl", hash = "sha256:0a931d85959ceb77e92aea4bbedfded0a31534ce191252721128f77e5ae1f98a"}, + {file = "ruff-0.11.7-py3-none-win_arm64.whl", hash = "sha256:778c1e5d6f9e91034142dfd06110534ca13220bfaad5c3735f6cb844654f6177"}, + {file = "ruff-0.11.7.tar.gz", hash = "sha256:655089ad3224070736dc32844fde783454f8558e71f501cb207485fe4eee23d4"}, +] + +[[package]] +name = "six" +version = "1.17.0" +description = "Python 2 and 3 compatibility utilities" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +groups = ["dev"] +files = [ + {file = "six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274"}, + {file = "six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81"}, +] + [[package]] name = "sniffio" version = "1.3.1" description = "Sniff out which async library your code is running under" optional = false python-versions = ">=3.7" -groups = ["main", "dev"] +groups = ["main"] files = [ {file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"}, {file = "sniffio-1.3.1.tar.gz", hash = 
"sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, @@ -833,20 +939,20 @@ files = [ [[package]] name = "taskiq" -version = "0.11.13" +version = "0.11.17" description = "Distributed task queue with full async support" optional = false python-versions = "<4.0,>=3.9" groups = ["main"] files = [ - {file = "taskiq-0.11.13-py3-none-any.whl", hash = "sha256:df4db3c2d43b33360b1de18d9d7195397e553fa45b7a559be26236f08cecddad"}, - {file = "taskiq-0.11.13.tar.gz", hash = "sha256:6d6d14807921b8ffa95a8418f992cf30edb9d8f9557c5f737c82ecaab8c387ea"}, + {file = "taskiq-0.11.17-py3-none-any.whl", hash = "sha256:a01fe1fc9c646f71113d0b886761a5e5253a35e625491d62e3379a14a99563b7"}, + {file = "taskiq-0.11.17.tar.gz", hash = "sha256:9c1c402beea452e8e834c53494035d653499d044ef1c7e6250c8fb7b31e52165"}, ] [package.dependencies] anyio = ">=3" importlib-metadata = "*" -izulu = "0.5.4" +izulu = "0.50.0" packaging = ">=19" pycron = ">=3.0.0,<4.0.0" pydantic = ">=1.0,<=3.0" @@ -860,7 +966,7 @@ metrics = ["prometheus_client (>=0,<1)"] msgpack = ["msgpack (>=1.0.7,<2.0.0)"] orjson = ["orjson (>=3,<4)"] reload = ["gitignore-parser (>=0,<1)", "watchdog (>=4,<5)"] -uv = ["uvloop (>=0.16.0,<1)"] +uv = ["uvloop (>=0.16.0,<1) ; sys_platform != \"win32\""] zmq = ["pyzmq (>=26,<27)"] [[package]] @@ -923,38 +1029,72 @@ files = [ [[package]] name = "types-mock" -version = "5.1.0.20240425" +version = "5.2.0.20250306" description = "Typing stubs for mock" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" groups = ["dev"] files = [ - {file = "types-mock-5.1.0.20240425.tar.gz", hash = "sha256:5281a645d72e827d70043e3cc144fe33b1c003db084f789dc203aa90e812a5a4"}, - {file = "types_mock-5.1.0.20240425-py3-none-any.whl", hash = "sha256:d586a01d39ad919d3ddcd73de6cde73ca7f3c69707219f722d1b8d7733641ad7"}, + {file = "types_mock-5.2.0.20250306-py3-none-any.whl", hash = "sha256:eb69fec98b8de26be1d7121623d05a2f117d1ea2e01dd30c123d07d204a15c95"}, + {file = "types_mock-5.2.0.20250306.tar.gz", hash = "sha256:15882cb5cf9980587a7607e31890801223801d7997f559686805ce09b6536087"}, ] [[package]] name = "typing-extensions" -version = "4.12.2" +version = "4.13.2" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" groups = ["main", "dev"] files = [ - {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, - {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, + {file = "typing_extensions-4.13.2-py3-none-any.whl", hash = "sha256:a439e7c04b49fec3e5d3e2beaa21755cadbbdc391694e28ccdd36ca4a1408f8c"}, + {file = "typing_extensions-4.13.2.tar.gz", hash = "sha256:e6c81219bd689f51865d9e372991c540bda33a0379d5573cddb9a3a23f7caaef"}, +] + +[[package]] +name = "typing-inspection" +version = "0.4.0" +description = "Runtime typing introspection tools" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "typing_inspection-0.4.0-py3-none-any.whl", hash = "sha256:50e72559fcd2a6367a19f7a7e610e6afcb9fac940c650290eed893d61386832f"}, + {file = "typing_inspection-0.4.0.tar.gz", hash = "sha256:9765c87de36671694a67904bf2c96e395be9c6439bb6c87b5142569dcdd65122"}, +] + +[package.dependencies] +typing-extensions = ">=4.12.0" + +[[package]] +name = "valkey" +version = "6.1.0" +description = "Python client for Valkey forked from redis-py" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = 
[ + {file = "valkey-6.1.0-py3-none-any.whl", hash = "sha256:cfe769edae894f74ac946eff1e93f7d7f466032c3030ba7e9d089a742459ac9c"}, + {file = "valkey-6.1.0.tar.gz", hash = "sha256:a652df15ed89c41935ffae6dfd09c56f4a9ab80b592e5ed9204d538e2ddad6d3"}, ] +[package.dependencies] +async-timeout = {version = ">=4.0.3", markers = "python_version < \"3.11.3\""} + +[package.extras] +libvalkey = ["libvalkey (>=4.0.1)"] +ocsp = ["cryptography (>=36.0.1)", "pyopenssl (==23.2.1)", "requests (>=2.31.0)"] + [[package]] name = "virtualenv" -version = "20.29.2" +version = "20.30.0" description = "Virtual Python Environment builder" optional = false python-versions = ">=3.8" groups = ["dev"] files = [ - {file = "virtualenv-20.29.2-py3-none-any.whl", hash = "sha256:febddfc3d1ea571bdb1dc0f98d7b45d24def7428214d4fb73cc486c9568cce6a"}, - {file = "virtualenv-20.29.2.tar.gz", hash = "sha256:fdaabebf6d03b5ba83ae0a02cfe96f48a716f4fae556461d180825866f75b728"}, + {file = "virtualenv-20.30.0-py3-none-any.whl", hash = "sha256:e34302959180fca3af42d1800df014b35019490b119eba981af27f2fa486e5d6"}, + {file = "virtualenv-20.30.0.tar.gz", hash = "sha256:800863162bcaa5450a6e4d721049730e7f2dae07720e0902b0e4040bd6f9ada8"}, ] [package.dependencies] @@ -964,7 +1104,7 @@ platformdirs = ">=3.9.1,<5" [package.extras] docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2,!=7.3)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] -test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8)", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10)"] +test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8) ; platform_python_implementation == \"PyPy\" or platform_python_implementation == \"GraalVM\" or platform_python_implementation == \"CPython\" and sys_platform == \"win32\" and python_version >= \"3.13\"", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10) ; platform_python_implementation == \"CPython\""] [[package]] name = "zipp" @@ -979,14 +1119,14 @@ files = [ ] [package.extras] -check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\""] cover = ["pytest-cov"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] enabler = ["pytest-enabler (>=2.2)"] -test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"] +test = ["big-O", "importlib-resources ; python_version < \"3.9\"", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"] type = ["pytest-mypy"] [metadata] lock-version = "2.1" python-versions = "^3.9" -content-hash = "e603bcea15f4be49b1d8c3e076c66bbbc4ef3e2f3c4c3d2b39a5e9aa601f339b" +content-hash = "820a48248408de7a9d5a8040aa9c3080525312fa0a67a8c9e18b36c08e91e48a" diff --git a/pyproject.toml b/pyproject.toml index af15f82..5e315bf 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -21,24 +21,37 @@ classifiers = [ "Topic :: System :: Networking", 
"Development Status :: 3 - Alpha", ] -keywords = ["taskiq", "tasks", "distributed", "async"] +keywords = [ + "taskiq", + "tasks", + "distributed", + "async", + "valkey", + "result_backend", +] packages = [{ include = "taskiq_valkey" }] [tool.poetry.dependencies] python = "^3.9" -taskiq = "^0" +taskiq = ">=0.11.12,<1" +valkey = "^6.1.0" [tool.poetry.group.dev.dependencies] +# test utils pytest = "^8" -mypy = "^1" -pre-commit = "^4" -coverage = "^7" pytest-cov = "^6" -mock = "^5" -anyio = "^4" +pytest-asyncio = "^0.26.0" pytest-xdist = { version = "^3", extras = ["psutil"] } +coverage = "^7" +mock = "^5" +freezegun = "^1.5.1" +# type checks +mypy = "^1" +pre-commit = "^4" types-mock = "^5" +# linters and formatters black = "^25" +ruff = "^0" [tool.mypy] strict = true @@ -120,3 +133,26 @@ allow-magic-value-types = ["int", "str", "float"] [tool.ruff.lint.flake8-bugbear] extend-immutable-calls = ["taskiq_dependencies.Depends", "taskiq.TaskiqDepends"] + + +[tool.pytest.ini_options] +pythonpath = [ + "." +] +asyncio_mode = "auto" +asyncio_default_fixture_loop_scope = "function" + +[tool.coverage.report] +exclude_lines = [ + "# pragma: no cover", + "def __repr__", + "def __str__", + "def __unicode__", +] +show_missing = true +skip_empty = true +omit = [ + "*/__init__.py", + "venv/*", + "tests/*" +] diff --git a/taskiq_valkey/__init__.py b/taskiq_valkey/__init__.py index 4cc73ac..31a20f3 100644 --- a/taskiq_valkey/__init__.py +++ b/taskiq_valkey/__init__.py @@ -1 +1,41 @@ -"""Project was generated using https://github.com/taskiq-python/project-template/.""" +"""Package for valkey integration.""" + +from taskiq_valkey.list_schedule_source import ListValkeyScheduleSource +from taskiq_valkey.schedule_source import ( + ValkeyClusterScheduleSource, + ValkeySentinelScheduleSource, +) +from taskiq_valkey.valkey_backend import ( + ValkeyAsyncClusterResultBackend, + ValkeyAsyncResultBackend, + ValkeyAsyncSentinelResultBackend, +) +from taskiq_valkey.valkey_broker import ( + PubSubBroker, + ValkeyStreamBroker, +) +from taskiq_valkey.valkey_cluster_broker import ( + ListQueueClusterBroker, + ValkeyStreamClusterBroker, +) +from taskiq_valkey.valkey_sentinel_broker import ( + ListQueueSentinelBroker, + PubSubSentinelBroker, + ValkeyStreamSentinelBroker, +) + +__all__ = [ + "ListQueueClusterBroker", + "ListQueueSentinelBroker", + "ListValkeyScheduleSource", + "PubSubBroker", + "PubSubSentinelBroker", + "ValkeyAsyncClusterResultBackend", + "ValkeyAsyncResultBackend", + "ValkeyAsyncSentinelResultBackend", + "ValkeyClusterScheduleSource", + "ValkeySentinelScheduleSource", + "ValkeyStreamBroker", + "ValkeyStreamClusterBroker", + "ValkeyStreamSentinelBroker", +] diff --git a/taskiq_valkey/exceptions.py b/taskiq_valkey/exceptions.py new file mode 100644 index 0000000..c740f0b --- /dev/null +++ b/taskiq_valkey/exceptions.py @@ -0,0 +1,23 @@ +from taskiq.exceptions import ResultBackendError, ResultGetError, TaskiqError + + +class TaskIQValkeyError(TaskiqError): + """Base error for all taskiq-valkey exceptions.""" + + +class DuplicateExpireTimeSelectedError(ResultBackendError, TaskIQValkeyError): + """Error if two lifetimes are selected.""" + + __template__ = "Choose either result_ex_time or result_px_time." + + +class ExpireTimeMustBeMoreThanZeroError(ResultBackendError, TaskIQValkeyError): + """Error if two lifetimes are less or equal zero.""" + + __template__ = ( + "You must select one expire time param and it must be more than zero." 
+ ) + + +class ResultIsMissingError(TaskIQValkeyError, ResultGetError): + """Error if there is no result when trying to get it.""" diff --git a/taskiq_valkey/list_schedule_source.py b/taskiq_valkey/list_schedule_source.py new file mode 100644 index 0000000..7ac1582 --- /dev/null +++ b/taskiq_valkey/list_schedule_source.py @@ -0,0 +1,240 @@ +import datetime +from logging import getLogger +from typing import Any, List, Optional + +from taskiq import ScheduledTask, ScheduleSource +from taskiq.abc.serializer import TaskiqSerializer +from taskiq.compat import model_dump, model_validate +from taskiq.serializers import PickleSerializer +from typing_extensions import Self +from valkey.asyncio import BlockingConnectionPool, Valkey + +logger = getLogger("taskiq.valkey_schedule_source") + + +class ListValkeyScheduleSource(ScheduleSource): + """Schedule source based on arrays.""" + + def __init__( + self, + url: str, + prefix: str = "schedule", + max_connection_pool_size: Optional[int] = None, + serializer: Optional[TaskiqSerializer] = None, + buffer_size: int = 50, + skip_past_schedules: bool = False, + **connection_kwargs: Any, + ) -> None: + """ + Create a new schedule source. + + :param url: Valkey URL + :param prefix: Prefix for all the keys + :param max_connection_pool_size: Maximum size of the connection pool + :param serializer: Serializer to use for the schedules + :param buffer_size: Buffer size for getting schedules + :param skip_past_schedules: Skip schedules that are in the past. + :param connection_kwargs: Additional connection kwargs + """ + super().__init__() + self._prefix = prefix + self._buffer_size = buffer_size + self._connection_pool = BlockingConnectionPool.from_url( + url=url, + max_connections=max_connection_pool_size, + **connection_kwargs, + ) + if serializer is None: + serializer = PickleSerializer() + self._serializer = serializer + self._is_first_run = True + self._previous_schedule_source: Optional[ScheduleSource] = None + self._delete_schedules_after_migration: bool = True + self._skip_past_schedules = skip_past_schedules + + async def startup(self) -> None: + """ + Startup the schedule source. + + By default this function does nothing. + But if the previous schedule source is set, + it will try to migrate schedules from it. 
+ """ + if self._previous_schedule_source is not None: + logger.info("Migrating schedules from previous source") + await self._previous_schedule_source.startup() + schedules = await self._previous_schedule_source.get_schedules() + logger.info(f"Found {len(schedules)}") + for schedule in schedules: + await self.add_schedule(schedule) + if self._delete_schedules_after_migration: + await self._previous_schedule_source.delete_schedule( + schedule.schedule_id, + ) + await self._previous_schedule_source.shutdown() + logger.info("Migration complete") + + def _get_time_key(self, time: datetime.datetime) -> str: + """Get the key for a time-based schedule.""" + if time.tzinfo is None: + time = time.replace(tzinfo=datetime.timezone.utc) + iso_time = time.astimezone(datetime.timezone.utc).strftime("%Y-%m-%dT%H:%M") + return f"{self._prefix}:time:{iso_time}" + + def _get_cron_key(self) -> str: + """Get the key for a cron-based schedule.""" + return f"{self._prefix}:cron" + + def _get_data_key(self, schedule_id: str) -> str: + """Get the key for a schedule data.""" + return f"{self._prefix}:data:{schedule_id}" + + def _parse_time_key(self, key: str) -> Optional[datetime.datetime]: + """Get time value from the timed-key.""" + try: + dt_str = key.split(":", 2)[2] + return datetime.datetime.strptime(dt_str, "%Y-%m-%dT%H:%M").replace( + tzinfo=datetime.timezone.utc, + ) + except ValueError: + logger.debug("Failed to parse time key %s", key) + return None + + async def _get_previous_time_schedules(self) -> list[bytes]: + """ + Function that gets all timed schedules that are in the past. + + Since this source doesn't retrieve all the schedules at once, + we need to get all the schedules that are in the past and haven't + been sent yet. + + We do this by getting all the time keys and checking if the time + is less than the current time. + + This function is called only during the first run to minimize + the number of requests to the Valkey server. + """ + logger.info("Getting previous time schedules") + minute_before = datetime.datetime.now( + datetime.timezone.utc, + ).replace(second=0, microsecond=0) - datetime.timedelta( + minutes=1, + ) + schedules = [] + async with Valkey(connection_pool=self._connection_pool) as valkey: + time_keys: list[str] = [] + # We need to get all the time keys and check if the time is less than + # the current time. + async for key in valkey.scan_iter(f"{self._prefix}:time:*"): + key_time = self._parse_time_key(key.decode()) + if key_time and key_time <= minute_before: + time_keys.append(key.decode()) + for key in time_keys: + schedules.extend(await valkey.lrange(key, 0, -1)) # type: ignore + + return schedules + + async def delete_schedule(self, schedule_id: str) -> None: + """Delete a schedule from the source.""" + async with Valkey(connection_pool=self._connection_pool) as valkey: + schedule = await valkey.getdel(self._get_data_key(schedule_id)) + if schedule is not None: + logger.debug("Deleting schedule %s", schedule_id) + schedule = model_validate( + ScheduledTask, + self._serializer.loadb(schedule), + ) + # We need to remove the schedule from the cron or time list. 
+ if schedule.cron is not None: + await valkey.lrem(self._get_cron_key(), 0, schedule_id) # type: ignore + elif schedule.time is not None: + time_key = self._get_time_key(schedule.time) + await valkey.lrem(time_key, 0, schedule_id) # type: ignore + + async def add_schedule(self, schedule: "ScheduledTask") -> None: + """Add a schedule to the source.""" + async with Valkey(connection_pool=self._connection_pool) as valkey: + # At first we set data key which contains the schedule data. + await valkey.set( + f"{self._prefix}:data:{schedule.schedule_id}", + self._serializer.dumpb(model_dump(schedule)), + ) + # Then we add the schedule to the cron or time list. + # This is an optimization, so we can get all the schedules + # for the current time much faster. + if schedule.cron is not None: + await valkey.rpush(self._get_cron_key(), schedule.schedule_id) # type: ignore + elif schedule.time is not None: + await valkey.rpush( # type: ignore + self._get_time_key(schedule.time), + schedule.schedule_id, + ) + + async def post_send(self, task: ScheduledTask) -> None: + """Delete a task after it's completed.""" + if task.time is not None: + await self.delete_schedule(task.schedule_id) + + async def get_schedules(self) -> List["ScheduledTask"]: + """ + Get all schedules. + + This function gets all the schedules from the schedule source. + What it does is get all the cron schedules and time schedules + for the current time and return them. + + If it's the first run, it also gets all the time schedules + that are in the past and haven't been sent yet. + """ + schedules = [] + current_time = datetime.datetime.now(datetime.timezone.utc) + timed: list[bytes] = [] + # Previous time schedules are fetched only on the first run. + if self._is_first_run and not self._skip_past_schedules: + timed = await self._get_previous_time_schedules() + self._is_first_run = False + async with Valkey(connection_pool=self._connection_pool) as valkey: + buffer = [] + crons = await valkey.lrange(self._get_cron_key(), 0, -1) # type: ignore + logger.debug("Got %d cron schedules", len(crons)) + if crons: + buffer.extend(crons) + timed.extend(await valkey.lrange(self._get_time_key(current_time), 0, -1)) # type: ignore + logger.debug("Got %d timed schedules", len(timed)) + if timed: + buffer.extend(timed) + while buffer: + schedules.extend( + await valkey.mget( + ( + self._get_data_key(x.decode()) + for x in buffer[: self._buffer_size] + ), + ), + ) + buffer = buffer[self._buffer_size :] + + return [ + model_validate(ScheduledTask, self._serializer.loadb(schedule)) + for schedule in schedules + if schedule + ] + + def with_migrate_from( + self, + source: ScheduleSource, + delete_schedules: bool = True, + ) -> Self: + """ + Enable migration from previous schedule source. + + If this function is called during declaration, + the source will try to migrate schedules from the previous source. + + :param source: previous schedule source + :param delete_schedules: delete schedules during migration process + from the previous source.
+ """ + self._previous_schedule_source = source + self._delete_schedules_after_migration = delete_schedules + return self diff --git a/taskiq_valkey/py.typed b/taskiq_valkey/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/taskiq_valkey/schedule_source.py b/taskiq_valkey/schedule_source.py new file mode 100644 index 0000000..a930b10 --- /dev/null +++ b/taskiq_valkey/schedule_source.py @@ -0,0 +1,199 @@ +import sys +from contextlib import asynccontextmanager +from typing import TYPE_CHECKING, Any, AsyncIterator, List, Optional, Tuple + +from taskiq import ScheduleSource +from taskiq.abc.serializer import TaskiqSerializer +from taskiq.compat import model_dump, model_validate +from taskiq.scheduler.scheduled_task import ScheduledTask +from taskiq.serializers import PickleSerializer +from valkey.asyncio import ( + BlockingConnectionPool, + Connection, + Sentinel, + Valkey, + ValkeyCluster, +) + +if sys.version_info >= (3, 10): + from typing import TypeAlias +else: + from typing_extensions import TypeAlias + +if TYPE_CHECKING: + _Valkey: TypeAlias = Valkey[bytes] # type: ignore + _BlockingConnectionPool: TypeAlias = BlockingConnectionPool[Connection] # type: ignore +else: + _Valkey: TypeAlias = Valkey + _BlockingConnectionPool: TypeAlias = BlockingConnectionPool # noqa: PYI047 + + +class ValkeyClusterScheduleSource(ScheduleSource): + """ + Source of schedules for valkey cluster. + + This class allows you to store schedules in valkey. + Also it supports dynamic schedules. + + :param url: url to valkey cluster. + :param prefix: prefix for valkey schedule keys. + :param serializer: serializer for data. + :param connection_kwargs: additional arguments for ValkeyCluster. + """ + + def __init__( + self, + url: str, + prefix: str = "schedule", + serializer: Optional[TaskiqSerializer] = None, + **connection_kwargs: Any, + ) -> None: + self.prefix = prefix + self.valkey: "ValkeyCluster" = ValkeyCluster.from_url( + url, + **connection_kwargs, + ) + if serializer is None: + serializer = PickleSerializer() + self.serializer = serializer + + async def delete_schedule(self, schedule_id: str) -> None: + """Remove schedule by id.""" + await self.valkey.delete(f"{self.prefix}:{schedule_id}") + + async def add_schedule(self, schedule: ScheduledTask) -> None: + """ + Add schedule to valkey. + + :param schedule: schedule to add. + """ + await self.valkey.set( + f"{self.prefix}:{schedule.schedule_id}", + self.serializer.dumpb(model_dump(schedule)), + ) + + async def get_schedules(self) -> List[ScheduledTask]: + """ + Get all schedules from valkey. + + This method is used by scheduler to get all schedules. + + :return: list of schedules. + """ + schedules = [] + async for key in self.valkey.scan_iter(f"{self.prefix}:*"): + raw_schedule = await self.valkey.get(key) + parsed_schedule = model_validate( + ScheduledTask, + self.serializer.loadb(raw_schedule), + ) + schedules.append(parsed_schedule) + return schedules + + async def post_send(self, task: ScheduledTask) -> None: + """Delete a task after it's completed.""" + if task.time is not None: + await self.delete_schedule(task.schedule_id) + + async def shutdown(self) -> None: + """Shut down the schedule source.""" + await self.valkey.aclose() + + +class ValkeySentinelScheduleSource(ScheduleSource): + """ + Source of schedules for valkey sentinel.
+ + This class allows you to store schedules in valkey. + Also it supports dynamic schedules. + + :param sentinels: list of sentinel host and ports pairs. + :param master_name: sentinel master name. + :param prefix: prefix for valkey schedule keys. + :param buffer_size: buffer size for valkey scan. + This is how many keys will be fetched at once. + :param max_connection_pool_size: maximum number of connections in pool. + :param serializer: serializer for data. + :param connection_kwargs: additional arguments for ValkeyCluster. + """ + + def __init__( + self, + sentinels: List[Tuple[str, int]], + master_name: str, + prefix: str = "schedule", + buffer_size: int = 50, + serializer: Optional[TaskiqSerializer] = None, + min_other_sentinels: int = 0, + sentinel_kwargs: Optional[Any] = None, + **connection_kwargs: Any, + ) -> None: + self.prefix = prefix + self.sentinel = Sentinel( + sentinels=sentinels, + min_other_sentinels=min_other_sentinels, + sentinel_kwargs=sentinel_kwargs, + **connection_kwargs, + ) + self.master_name = master_name + self.buffer_size = buffer_size + if serializer is None: + serializer = PickleSerializer() + self.serializer = serializer + + @asynccontextmanager + async def _acquire_master_conn(self) -> AsyncIterator[_Valkey]: + async with self.sentinel.master_for(self.master_name) as valkey_conn: + yield valkey_conn + + async def delete_schedule(self, schedule_id: str) -> None: + """Remove schedule by id.""" + async with self._acquire_master_conn() as valkey: + await valkey.delete(f"{self.prefix}:{schedule_id}") + + async def add_schedule(self, schedule: ScheduledTask) -> None: + """ + Add schedule to valkey. + + :param schedule: schedule to add. + :param schedule_id: schedule id. + """ + async with self._acquire_master_conn() as valkey: + await valkey.set( + f"{self.prefix}:{schedule.schedule_id}", + self.serializer.dumpb(model_dump(schedule)), + ) + + async def get_schedules(self) -> List[ScheduledTask]: + """ + Get all schedules from valkey. + + This method is used by scheduler to get all schedules. + + :return: list of schedules. 
+ """ + schedules = [] + async with self._acquire_master_conn() as valkey: + buffer = [] + async for key in valkey.scan_iter(f"{self.prefix}:*"): + buffer.append(key) + if len(buffer) >= self.buffer_size: + schedules.extend(await valkey.mget(buffer)) + buffer = [] + if buffer: + schedules.extend(await valkey.mget(buffer)) + return [ + model_validate(ScheduledTask, self.serializer.loadb(schedule)) + for schedule in schedules + if schedule + ] + + async def post_send(self, task: ScheduledTask) -> None: + """Delete a task after it's completed.""" + if task.time is not None: + await self.delete_schedule(task.schedule_id) + + async def shutdown(self) -> None: + """Shut down the schedule source.""" + for sentinel in self.sentinel.sentinels: + await sentinel.aclose() diff --git a/taskiq_valkey/valkey_backend.py b/taskiq_valkey/valkey_backend.py new file mode 100644 index 0000000..808e31d --- /dev/null +++ b/taskiq_valkey/valkey_backend.py @@ -0,0 +1,605 @@ +import sys +from contextlib import asynccontextmanager +from typing import ( + TYPE_CHECKING, + Any, + AsyncIterator, + Dict, + List, + Optional, + Tuple, + TypeVar, + Union, +) + +from taskiq import AsyncResultBackend +from taskiq.abc.serializer import TaskiqSerializer +from taskiq.compat import model_dump, model_validate +from taskiq.depends.progress_tracker import TaskProgress +from taskiq.result import TaskiqResult +from taskiq.serializers import PickleSerializer +from valkey.asyncio import BlockingConnectionPool, Sentinel, Valkey +from valkey.asyncio.cluster import ValkeyCluster +from valkey.asyncio.connection import Connection + +from taskiq_valkey.exceptions import ( + DuplicateExpireTimeSelectedError, + ExpireTimeMustBeMoreThanZeroError, + ResultIsMissingError, +) + +if sys.version_info >= (3, 10): + from typing import TypeAlias +else: + from typing_extensions import TypeAlias + +if TYPE_CHECKING: + _Valkey: TypeAlias = Valkey[bytes] # type: ignore + _BlockingConnectionPool: TypeAlias = BlockingConnectionPool[Connection] # type: ignore +else: + _Valkey: TypeAlias = Valkey + _BlockingConnectionPool: TypeAlias = BlockingConnectionPool + +_ReturnType = TypeVar("_ReturnType") + +PROGRESS_KEY_SUFFIX = "__progress" + + +class ValkeyAsyncResultBackend(AsyncResultBackend[_ReturnType]): + """Async result based on valkey.""" + + def __init__( + self, + valkey_url: str, + keep_results: bool = True, + result_ex_time: Optional[int] = None, + result_px_time: Optional[int] = None, + max_connection_pool_size: Optional[int] = None, + serializer: Optional[TaskiqSerializer] = None, + prefix_str: Optional[str] = None, + **connection_kwargs: Any, + ) -> None: + """ + Constructs a new result backend. + + :param valkey_url: url to valkey. + :param keep_results: flag to not remove results from Valkey after reading. + :param result_ex_time: expire time in seconds for result. + :param result_px_time: expire time in milliseconds for result. + :param max_connection_pool_size: maximum number of connections in pool. + :param connection_kwargs: additional arguments for valkey BlockingConnectionPool. + + :raises DuplicateExpireTimeSelectedError: if result_ex_time + and result_px_time are selected. + :raises ExpireTimeMustBeMoreThanZeroError: if result_ex_time + and result_px_time are equal zero. 
+ """ # noqa: E501 + self.valkey_pool: _BlockingConnectionPool = BlockingConnectionPool.from_url( + url=valkey_url, + max_connections=max_connection_pool_size, + **connection_kwargs, + ) + self.serializer = serializer or PickleSerializer() + self.keep_results = keep_results + self.result_ex_time = result_ex_time + self.result_px_time = result_px_time + self.prefix_str = prefix_str + + unavailable_conditions = any( + ( + self.result_ex_time is not None and self.result_ex_time <= 0, + self.result_px_time is not None and self.result_px_time <= 0, + ), + ) + if unavailable_conditions: + raise ExpireTimeMustBeMoreThanZeroError + + if self.result_ex_time and self.result_px_time: + raise DuplicateExpireTimeSelectedError + + def _task_name(self, task_id: str) -> str: + if self.prefix_str is None: + return task_id + return f"{self.prefix_str}:{task_id}" + + async def shutdown(self) -> None: + """Closes valkey connection.""" + await self.valkey_pool.disconnect() + await super().shutdown() + + async def set_result( + self, + task_id: str, + result: TaskiqResult[_ReturnType], + ) -> None: + """ + Sets task result in valkey. + + Dumps TaskiqResult instance into the bytes and writes + it to valkey. + + :param task_id: ID of the task. + :param result: TaskiqResult instance. + """ + valkey_set_params: Dict[str, Union[str, int, bytes]] = { + "name": self._task_name(task_id), + "value": self.serializer.dumpb(model_dump(result)), + } + if self.result_ex_time: + valkey_set_params["ex"] = self.result_ex_time + elif self.result_px_time: + valkey_set_params["px"] = self.result_px_time + + async with Valkey(connection_pool=self.valkey_pool) as valkey: + await valkey.set(**valkey_set_params) # type: ignore + + async def is_result_ready(self, task_id: str) -> bool: + """ + Returns whether the result is ready. + + :param task_id: ID of the task. + + :returns: True if the result is ready else False. + """ + async with Valkey(connection_pool=self.valkey_pool) as valkey: + return bool(await valkey.exists(self._task_name(task_id))) + + async def get_result( + self, + task_id: str, + with_logs: bool = False, + ) -> TaskiqResult[_ReturnType]: + """ + Gets result from the task. + + :param task_id: task's id. + :param with_logs: if True it will download task's logs. + :raises ResultIsMissingError: if there is no result when trying to get it. + :return: task's return value. + """ + task_name = self._task_name(task_id) + async with Valkey(connection_pool=self.valkey_pool) as valkey: + if self.keep_results: + result_value = await valkey.get( + name=task_name, + ) + else: + result_value = await valkey.getdel( + name=task_name, + ) + + if result_value is None: + raise ResultIsMissingError + + taskiq_result = model_validate( + TaskiqResult[_ReturnType], + self.serializer.loadb(result_value), + ) + + if not with_logs: + taskiq_result.log = None + + return taskiq_result + + async def set_progress( + self, + task_id: str, + progress: TaskProgress[_ReturnType], + ) -> None: + """ + Sets task progress in valkey. + + Dumps TaskProgress instance into the bytes and writes + it to valkey with a standard suffix on the task_id as the key + + :param task_id: ID of the task. + :param result: task's TaskProgress instance. 
+ """ + valkey_set_params: Dict[str, Union[str, int, bytes]] = { + "name": self._task_name(task_id) + PROGRESS_KEY_SUFFIX, + "value": self.serializer.dumpb(model_dump(progress)), + } + if self.result_ex_time: + valkey_set_params["ex"] = self.result_ex_time + elif self.result_px_time: + valkey_set_params["px"] = self.result_px_time + + async with Valkey(connection_pool=self.valkey_pool) as valkey: + await valkey.set(**valkey_set_params) # type: ignore + + async def get_progress( + self, + task_id: str, + ) -> Union[TaskProgress[_ReturnType], None]: + """ + Gets progress results from the task. + + :param task_id: task's id. + :return: task's TaskProgress instance. + """ + async with Valkey(connection_pool=self.valkey_pool) as valkey: + result_value = await valkey.get( + name=self._task_name(task_id) + PROGRESS_KEY_SUFFIX, + ) + + if result_value is None: + return None + + return model_validate( + TaskProgress[_ReturnType], + self.serializer.loadb(result_value), + ) + + +class ValkeyAsyncClusterResultBackend(AsyncResultBackend[_ReturnType]): + """Async result backend based on valkey cluster.""" + + def __init__( + self, + valkey_url: str, + keep_results: bool = True, + result_ex_time: Optional[int] = None, + result_px_time: Optional[int] = None, + serializer: Optional[TaskiqSerializer] = None, + prefix_str: Optional[str] = None, + **connection_kwargs: Any, + ) -> None: + """ + Constructs a new result backend. + + :param valkey_url: url to valkey cluster. + :param keep_results: flag to not remove results from Valkey after reading. + :param result_ex_time: expire time in seconds for result. + :param result_px_time: expire time in milliseconds for result. + :param connection_kwargs: additional arguments for ValkeyCluster. + + :raises DuplicateExpireTimeSelectedError: if result_ex_time + and result_px_time are selected. + :raises ExpireTimeMustBeMoreThanZeroError: if result_ex_time + and result_px_time are equal zero. + """ + self.valkey: "ValkeyCluster" = ValkeyCluster.from_url( + valkey_url, + **connection_kwargs, + ) + self.serializer = serializer or PickleSerializer() + self.keep_results = keep_results + self.result_ex_time = result_ex_time + self.result_px_time = result_px_time + self.prefix_str = prefix_str + + unavailable_conditions = any( + ( + self.result_ex_time is not None and self.result_ex_time <= 0, + self.result_px_time is not None and self.result_px_time <= 0, + ), + ) + if unavailable_conditions: + raise ExpireTimeMustBeMoreThanZeroError + + if self.result_ex_time and self.result_px_time: + raise DuplicateExpireTimeSelectedError + + def _task_name(self, task_id: str) -> str: + if self.prefix_str is None: + return task_id + return f"{self.prefix_str}:{task_id}" + + async def shutdown(self) -> None: + """Closes valkey connection.""" + await self.valkey.aclose() + await super().shutdown() + + async def set_result( + self, + task_id: str, + result: TaskiqResult[_ReturnType], + ) -> None: + """ + Sets task result in valkey. + + Dumps TaskiqResult instance into the bytes and writes + it to valkey. + + :param task_id: ID of the task. + :param result: TaskiqResult instance. 
+ """ + valkey_set_params: Dict[str, Union[str, bytes, int]] = { + "name": self._task_name(task_id), + "value": self.serializer.dumpb(model_dump(result)), + } + if self.result_ex_time: + valkey_set_params["ex"] = self.result_ex_time + elif self.result_px_time: + valkey_set_params["px"] = self.result_px_time + + await self.valkey.set(**valkey_set_params) # type: ignore + + async def is_result_ready(self, task_id: str) -> bool: + """ + Returns whether the result is ready. + + :param task_id: ID of the task. + + :returns: True if the result is ready else False. + """ + return bool(await self.valkey.exists(self._task_name(task_id))) + + async def get_result( + self, + task_id: str, + with_logs: bool = False, + ) -> TaskiqResult[_ReturnType]: + """ + Gets result from the task. + + :param task_id: task's id. + :param with_logs: if True it will download task's logs. + :raises ResultIsMissingError: if there is no result when trying to get it. + :return: task's return value. + """ + task_name = self._task_name(task_id) + if self.keep_results: + result_value = await self.valkey.get( + name=task_name, + ) + else: + result_value = await self.valkey.getdel( + name=task_name, + ) + + if result_value is None: + raise ResultIsMissingError + + taskiq_result: TaskiqResult[_ReturnType] = model_validate( + TaskiqResult[_ReturnType], + self.serializer.loadb(result_value), + ) + + if not with_logs: + taskiq_result.log = None + + return taskiq_result + + async def set_progress( + self, + task_id: str, + progress: TaskProgress[_ReturnType], + ) -> None: + """ + Sets task progress in valkey. + + Dumps TaskProgress instance into the bytes and writes + it to valkey with a standard suffix on the task_id as the key + + :param task_id: ID of the task. + :param result: task's TaskProgress instance. + """ + valkey_set_params: Dict[str, Union[str, int, bytes]] = { + "name": self._task_name(task_id) + PROGRESS_KEY_SUFFIX, + "value": self.serializer.dumpb(model_dump(progress)), + } + if self.result_ex_time: + valkey_set_params["ex"] = self.result_ex_time + elif self.result_px_time: + valkey_set_params["px"] = self.result_px_time + + await self.valkey.set(**valkey_set_params) # type: ignore + + async def get_progress( + self, + task_id: str, + ) -> Union[TaskProgress[_ReturnType], None]: + """ + Gets progress results from the task. + + :param task_id: task's id. + :return: task's TaskProgress instance. + """ + result_value = await self.valkey.get( + name=self._task_name(task_id) + PROGRESS_KEY_SUFFIX, + ) + + if result_value is None: + return None + + return model_validate( + TaskProgress[_ReturnType], + self.serializer.loadb(result_value), + ) + + +class ValkeyAsyncSentinelResultBackend(AsyncResultBackend[_ReturnType]): + """Async result based on valkey sentinel.""" + + def __init__( + self, + sentinels: List[Tuple[str, int]], + master_name: str, + keep_results: bool = True, + result_ex_time: Optional[int] = None, + result_px_time: Optional[int] = None, + min_other_sentinels: int = 0, + sentinel_kwargs: Optional[Any] = None, + serializer: Optional[TaskiqSerializer] = None, + prefix_str: Optional[str] = None, + **connection_kwargs: Any, + ) -> None: + """ + Constructs a new result backend. + + :param sentinels: list of sentinel host and ports pairs. + :param master_name: sentinel master name. + :param keep_results: flag to not remove results from Valkey after reading. + :param result_ex_time: expire time in seconds for result. + :param result_px_time: expire time in milliseconds for result. 
+ :param max_connection_pool_size: maximum number of connections in pool. + :param connection_kwargs: additional arguments for valkey BlockingConnectionPool. + + :raises DuplicateExpireTimeSelectedError: if result_ex_time + and result_px_time are selected. + :raises ExpireTimeMustBeMoreThanZeroError: if result_ex_time + and result_px_time are equal zero. + """ # noqa: E501 + self.sentinel = Sentinel( + sentinels=sentinels, + min_other_sentinels=min_other_sentinels, + sentinel_kwargs=sentinel_kwargs, + **connection_kwargs, + ) + self.master_name = master_name + self.serializer = serializer or PickleSerializer() + self.keep_results = keep_results + self.result_ex_time = result_ex_time + self.result_px_time = result_px_time + self.prefix_str = prefix_str + + unavailable_conditions = any( + ( + self.result_ex_time is not None and self.result_ex_time <= 0, + self.result_px_time is not None and self.result_px_time <= 0, + ), + ) + if unavailable_conditions: + raise ExpireTimeMustBeMoreThanZeroError + + if self.result_ex_time and self.result_px_time: + raise DuplicateExpireTimeSelectedError + + def _task_name(self, task_id: str) -> str: + if self.prefix_str is None: + return task_id + return f"{self.prefix_str}:{task_id}" + + @asynccontextmanager + async def _acquire_master_conn(self) -> AsyncIterator[_Valkey]: + async with self.sentinel.master_for(self.master_name) as valkey_conn: + yield valkey_conn + + async def set_result( + self, + task_id: str, + result: TaskiqResult[_ReturnType], + ) -> None: + """ + Sets task result in valkey. + + Dumps TaskiqResult instance into the bytes and writes + it to valkey. + + :param task_id: ID of the task. + :param result: TaskiqResult instance. + """ + valkey_set_params: Dict[str, Union[str, bytes, int]] = { + "name": self._task_name(task_id), + "value": self.serializer.dumpb(model_dump(result)), + } + if self.result_ex_time: + valkey_set_params["ex"] = self.result_ex_time + elif self.result_px_time: + valkey_set_params["px"] = self.result_px_time + + async with self._acquire_master_conn() as valkey: + await valkey.set(**valkey_set_params) # type: ignore + + async def is_result_ready(self, task_id: str) -> bool: + """ + Returns whether the result is ready. + + :param task_id: ID of the task. + + :returns: True if the result is ready else False. + """ + async with self._acquire_master_conn() as valkey: + return bool(await valkey.exists(self._task_name(task_id))) + + async def get_result( + self, + task_id: str, + with_logs: bool = False, + ) -> TaskiqResult[_ReturnType]: + """ + Gets result from the task. + + :param task_id: task's id. + :param with_logs: if True it will download task's logs. + :raises ResultIsMissingError: if there is no result when trying to get it. + :return: task's return value. + """ + task_name = self._task_name(task_id) + async with self._acquire_master_conn() as valkey: + if self.keep_results: + result_value = await valkey.get( + name=task_name, + ) + else: + result_value = await valkey.getdel( + name=task_name, + ) + + if result_value is None: + raise ResultIsMissingError + + taskiq_result = model_validate( + TaskiqResult[_ReturnType], + self.serializer.loadb(result_value), + ) + + if not with_logs: + taskiq_result.log = None + + return taskiq_result + + async def set_progress( + self, + task_id: str, + progress: TaskProgress[_ReturnType], + ) -> None: + """ + Sets task progress in valkey. 
+ + Dumps TaskProgress instance into the bytes and writes + it to valkey with a standard suffix on the task_id as the key + + :param task_id: ID of the task. + :param result: task's TaskProgress instance. + """ + valkey_set_params: Dict[str, Union[str, int, bytes]] = { + "name": self._task_name(task_id) + PROGRESS_KEY_SUFFIX, + "value": self.serializer.dumpb(model_dump(progress)), + } + if self.result_ex_time: + valkey_set_params["ex"] = self.result_ex_time + elif self.result_px_time: + valkey_set_params["px"] = self.result_px_time + + async with self._acquire_master_conn() as valkey: + await valkey.set(**valkey_set_params) # type: ignore + + async def get_progress( + self, + task_id: str, + ) -> Union[TaskProgress[_ReturnType], None]: + """ + Gets progress results from the task. + + :param task_id: task's id. + :return: task's TaskProgress instance. + """ + async with self._acquire_master_conn() as valkey: + result_value = await valkey.get( + name=self._task_name(task_id) + PROGRESS_KEY_SUFFIX, + ) + + if result_value is None: + return None + + return model_validate( + TaskProgress[_ReturnType], + self.serializer.loadb(result_value), + ) + + async def shutdown(self) -> None: + """Shutdown sentinel connections.""" + for sentinel in self.sentinel.sentinels: + await sentinel.aclose() diff --git a/taskiq_valkey/valkey_broker.py b/taskiq_valkey/valkey_broker.py new file mode 100644 index 0000000..3494fe7 --- /dev/null +++ b/taskiq_valkey/valkey_broker.py @@ -0,0 +1,280 @@ +import sys +import uuid +from logging import getLogger +from typing import ( + TYPE_CHECKING, + Any, + AsyncGenerator, + Awaitable, + Callable, + Dict, + Optional, + TypeVar, +) + +from taskiq import AckableMessage +from taskiq.abc.broker import AsyncBroker +from taskiq.abc.result_backend import AsyncResultBackend +from taskiq.message import BrokerMessage +from valkey.asyncio import BlockingConnectionPool, Connection, ResponseError, Valkey + +_T = TypeVar("_T") + +logger = getLogger("taskiq.valkey_broker") + +if sys.version_info >= (3, 10): + from typing import TypeAlias +else: + from typing_extensions import TypeAlias + +if TYPE_CHECKING: + _BlockingConnectionPool: TypeAlias = BlockingConnectionPool[Connection] # type: ignore +else: + _BlockingConnectionPool: TypeAlias = BlockingConnectionPool + + +class BaseValkeyBroker(AsyncBroker): + """Base broker that works with Valkey.""" + + def __init__( + self, + url: str, + task_id_generator: Optional[Callable[[], str]] = None, + result_backend: Optional[AsyncResultBackend[_T]] = None, + queue_name: str = "taskiq", + max_connection_pool_size: Optional[int] = None, + **connection_kwargs: Any, + ) -> None: + """ + Constructs a new broker. + + :param url: url to valkey. + :param task_id_generator: custom task_id generator. + :param result_backend: custom result backend. + :param queue_name: name for a list in valkey. + :param max_connection_pool_size: maximum number of connections in pool. + Each worker opens its own connection. Therefore this value has to be + at least number of workers + 1. + :param connection_kwargs: additional arguments for valkey BlockingConnectionPool. 
+        """  # noqa: E501
+        super().__init__(
+            result_backend=result_backend,
+            task_id_generator=task_id_generator,
+        )
+
+        self.connection_pool: _BlockingConnectionPool = BlockingConnectionPool.from_url(
+            url=url,
+            max_connections=max_connection_pool_size,
+            **connection_kwargs,
+        )
+        self.queue_name = queue_name
+
+    async def shutdown(self) -> None:
+        """Closes valkey connection pool."""
+        await super().shutdown()
+        await self.connection_pool.disconnect()
+
+
+class PubSubBroker(BaseValkeyBroker):
+    """Broker that works with Valkey and broadcasts tasks to all workers."""
+
+    async def kick(self, message: BrokerMessage) -> None:
+        """
+        Publish message over PUBSUB channel.
+
+        :param message: message to send.
+        """
+        queue_name = message.labels.get("queue_name") or self.queue_name
+        async with Valkey(connection_pool=self.connection_pool) as valkey_conn:
+            await valkey_conn.publish(queue_name, message.message)
+
+    async def listen(self) -> AsyncGenerator[bytes, None]:
+        """
+        Listen to the valkey queue for new messages.
+
+        This function listens to the pubsub channel
+        and yields all messages with proper types.
+
+        :yields: broker messages.
+        """
+        async with Valkey(connection_pool=self.connection_pool) as valkey_conn:
+            valkey_pubsub_channel = valkey_conn.pubsub()
+            await valkey_pubsub_channel.subscribe(self.queue_name)
+            async for message in valkey_pubsub_channel.listen():
+                if not message:
+                    continue
+                if message["type"] != "message":
+                    logger.debug("Received non-message from valkey: %s", message)
+                    continue
+                yield message["data"]
+
+
+class ValkeyStreamBroker(BaseValkeyBroker):
+    """
+    Valkey broker that uses streams for task distribution.
+
+    You can read more about streams here:
+    https://valkey.io/docs/latest/develop/data-types/streams
+
+    This broker supports acknowledgment of messages.
+    """
+
+    def __init__(
+        self,
+        url: str,
+        queue_name: str = "taskiq",
+        max_connection_pool_size: Optional[int] = None,
+        consumer_group_name: str = "taskiq",
+        consumer_name: Optional[str] = None,
+        consumer_id: str = "$",
+        mkstream: bool = True,
+        xread_block: int = 2000,
+        maxlen: Optional[int] = None,
+        idle_timeout: int = 600000,  # 10 minutes
+        unacknowledged_batch_size: int = 100,
+        xread_count: Optional[int] = 100,
+        additional_streams: Optional[Dict[str, str]] = None,
+        **connection_kwargs: Any,
+    ) -> None:
+        """
+        Constructs a new broker that uses streams.
+
+        :param url: url to valkey.
+        :param queue_name: name for a key with stream in valkey.
+        :param max_connection_pool_size: maximum number of connections in pool.
+            Each worker opens its own connection. Therefore this value has to be
+            at least number of workers + 1.
+        :param consumer_group_name: name for a consumer group.
+            Valkey will keep track of acked messages for this group.
+        :param consumer_name: name for a consumer. By default it is a random uuid.
+        :param consumer_id: id for a consumer. ID of a message to start reading from.
+            $ means start from the latest message.
+        :param mkstream: create stream if it does not exist.
+        :param xread_block: block time in ms for xreadgroup.
+            Better to set it to a bigger value, to avoid unnecessary calls.
+        :param maxlen: sets the maximum length of the stream;
+            old entries are trimmed each time a new element is added.
+        :param xread_count: number of messages to fetch from the stream at once.
+        :param additional_streams: additional streams to read from.
+            Each key is a stream name, value is a consumer id.
+        :param idle_timeout: time in ms to wait before redelivering a pending message.
+ :param unacknowledged_batch_size: number of unacknowledged messages to fetch. + """ + super().__init__( + url, + task_id_generator=None, + result_backend=None, + queue_name=queue_name, + max_connection_pool_size=max_connection_pool_size, + **connection_kwargs, + ) + self.consumer_group_name = consumer_group_name + self.consumer_name = consumer_name or str(uuid.uuid4()) + self.consumer_id = consumer_id + self.mkstream = mkstream + self.block = xread_block + self.maxlen = maxlen + self.additional_streams = additional_streams or {} + self.idle_timeout = idle_timeout + self.unacknowledged_batch_size = unacknowledged_batch_size + self.count = xread_count + + async def _declare_consumer_group(self) -> None: + """ + Declare consumber group. + + Required for proper work of the broker. + """ + streams = {self.queue_name, *self.additional_streams.keys()} + async with Valkey(connection_pool=self.connection_pool) as valkey_conn: + for stream_name in streams: + try: + await valkey_conn.xgroup_create( + stream_name, + self.consumer_group_name, + id=self.consumer_id, + mkstream=self.mkstream, + ) + except ResponseError as err: + logger.debug(err) + + async def startup(self) -> None: + """Declare consumer group on startup.""" + await super().startup() + await self._declare_consumer_group() + + async def kick(self, message: BrokerMessage) -> None: + """ + Put a message in a list. + + This method appends a message to the list of all messages. + + :param message: message to append. + """ + async with Valkey(connection_pool=self.connection_pool) as valkey_conn: + await valkey_conn.xadd( + self.queue_name, + {b"data": message.message}, + maxlen=self.maxlen, + ) + + def _ack_generator(self, id: str) -> Callable[[], Awaitable[None]]: + async def _ack() -> None: + async with Valkey(connection_pool=self.connection_pool) as valkey_conn: + await valkey_conn.xack( + self.queue_name, + self.consumer_group_name, + id, + ) + + return _ack + + async def listen(self) -> AsyncGenerator[AckableMessage, None]: + """Listen to incoming messages.""" + async with Valkey(connection_pool=self.connection_pool) as valkey_conn: + while True: + logger.debug("Starting fetching new messages") + fetched = await valkey_conn.xreadgroup( + self.consumer_group_name, + self.consumer_name, + { + self.queue_name: ">", + **self.additional_streams, # type: ignore + }, + block=self.block, + noack=False, + count=self.count, + ) + for _, msg_list in fetched: + for msg_id, msg in msg_list: + logger.debug("Received message: %s", msg) + yield AckableMessage( + data=msg[b"data"], + ack=self._ack_generator(msg_id), + ) + logger.debug("Starting fetching unacknowledged messages") + for stream in [self.queue_name, *self.additional_streams.keys()]: + lock = valkey_conn.lock( + f"autoclaim:{self.consumer_group_name}:{stream}", + ) + if await lock.locked(): + continue + async with lock: + pending = await valkey_conn.xautoclaim( + name=stream, + groupname=self.consumer_group_name, + consumername=self.consumer_name, + min_idle_time=self.idle_timeout, + count=self.unacknowledged_batch_size, + ) + logger.debug( + "Found %d pending messages in stream %s", + len(pending), + stream, + ) + for msg_id, msg in pending[1]: + logger.debug("Received message: %s", msg) + yield AckableMessage( + data=msg[b"data"], + ack=self._ack_generator(msg_id), + ) diff --git a/taskiq_valkey/valkey_cluster_broker.py b/taskiq_valkey/valkey_cluster_broker.py new file mode 100644 index 0000000..e17f22b --- /dev/null +++ b/taskiq_valkey/valkey_cluster_broker.py @@ -0,0 +1,188 @@ +import 
uuid +from logging import getLogger +from typing import Any, AsyncGenerator, Awaitable, Callable, Dict, Optional + +from taskiq import AckableMessage +from taskiq.abc.broker import AsyncBroker +from taskiq.message import BrokerMessage +from valkey.asyncio import ResponseError, ValkeyCluster + +logger = getLogger(__name__) + + +class BaseValkeyClusterBroker(AsyncBroker): + """Base broker that works with Valkey Cluster.""" + + def __init__( + self, + url: str, + queue_name: str = "taskiq", + max_connection_pool_size: int = 2**31, + **connection_kwargs: Any, + ) -> None: + """ + Constructs a new broker. + + :param url: url to valkey. + :param queue_name: name for a list in valkey. + :param max_connection_pool_size: maximum number of connections in pool. + :param connection_kwargs: additional arguments for aio-valkey ConnectionPool. + """ + super().__init__() + + self.valkey: "ValkeyCluster[bytes]" = ValkeyCluster.from_url( # type: ignore + url=url, + max_connections=max_connection_pool_size, + **connection_kwargs, + ) + + self.queue_name = queue_name + + async def shutdown(self) -> None: + """Closes valkey connection pool.""" + await self.valkey.aclose() + await super().shutdown() + + +class ListQueueClusterBroker(BaseValkeyClusterBroker): + """Broker that works with Valkey Cluster and distributes tasks between workers.""" + + async def kick(self, message: BrokerMessage) -> None: + """ + Put a message in a list. + + This method appends a message to the list of all messages. + + :param message: message to append. + """ + await self.valkey.lpush(self.queue_name, message.message) # type: ignore + + async def listen(self) -> AsyncGenerator[bytes, None]: + """ + Listen valkey queue for new messages. + + This function listens to the queue + and yields new messages if they have BrokerMessage type. + + :yields: broker messages. + """ + valkey_brpop_data_position = 1 + while True: + value = await self.valkey.brpop([self.queue_name]) # type: ignore + yield value[valkey_brpop_data_position] + + +class ValkeyStreamClusterBroker(BaseValkeyClusterBroker): + """ + Valkey broker that uses streams for task distribution. + + You can read more about streams here: + https://valkey.io/docs/latest/develop/data-types/streams + + This broker supports acknowledgment of messages. + """ + + def __init__( + self, + url: str, + queue_name: str = "taskiq", + max_connection_pool_size: int = 2**31, + consumer_group_name: str = "taskiq", + consumer_name: Optional[str] = None, + consumer_id: str = "$", + mkstream: bool = True, + xread_block: int = 10000, + additional_streams: Optional[Dict[str, str]] = None, + **connection_kwargs: Any, + ) -> None: + """ + Constructs a new broker that uses streams. + + :param url: url to valkey. + :param queue_name: name for a key with stream in valkey. + :param max_connection_pool_size: maximum number of connections in pool. + Each worker opens its own connection. Therefore this value has to be + at least number of workers + 1. + :param consumer_group_name: name for a consumer group. + Valkey will keep track of acked messages for this group. + :param consumer_name: name for a consumer. By default it is a random uuid. + :param consumer_id: id for a consumer. ID of a message to start reading from. + $ means start from the latest message. + :param mkstream: create stream if it does not exist. + :param xread_block: block time in ms for xreadgroup. + Better to set it to a bigger value, to avoid unnecessary calls. + :param additional_streams: additional streams to read from. 
+ Each key is a stream name, value is a consumer id. + """ + super().__init__( + url, + queue_name=queue_name, + max_connection_pool_size=max_connection_pool_size, + **connection_kwargs, + ) + self.consumer_group_name = consumer_group_name + self.consumer_name = consumer_name or str(uuid.uuid4()) + self.consumer_id = consumer_id + self.mkstream = mkstream + self.block = xread_block + self.additional_streams = additional_streams or {} + + async def _declare_consumer_group(self) -> None: + streams = {self.queue_name, *self.additional_streams.keys()} + async with self.valkey as valkey_conn: + for stream_name in streams: + try: + await valkey_conn.xgroup_create( + stream_name, + self.consumer_group_name, + id=self.consumer_id, + mkstream=self.mkstream, + ) + except ResponseError as err: + logger.debug(err) + + async def startup(self) -> None: + """Declare consumer group on startup.""" + await super().startup() + await self._declare_consumer_group() + + async def kick(self, message: BrokerMessage) -> None: + """ + Put a message in a list. + + This method appends a message to the list of all messages. + + :param message: message to append. + """ + await self.valkey.xadd(self.queue_name, {b"data": message.message}) + + def _ack_generator(self, id: str) -> Callable[[], Awaitable[None]]: + async def _ack() -> None: + await self.valkey.xack( + self.queue_name, + self.consumer_group_name, + id, + ) + + return _ack + + async def listen(self) -> AsyncGenerator[AckableMessage, None]: + """Listen to the stream for new messages.""" + while True: + fetched = await self.valkey.xreadgroup( + self.consumer_group_name, + self.consumer_name, + { + self.queue_name: ">", + **self.additional_streams, # type: ignore + }, + block=self.block, + noack=False, + ) + for _, msg_list in fetched: + for msg_id, msg in msg_list: + logger.debug("Received message: %s", msg) + yield AckableMessage( + data=msg[b"data"], + ack=self._ack_generator(msg_id), + ) diff --git a/taskiq_valkey/valkey_sentinel_broker.py b/taskiq_valkey/valkey_sentinel_broker.py new file mode 100644 index 0000000..b0cb425 --- /dev/null +++ b/taskiq_valkey/valkey_sentinel_broker.py @@ -0,0 +1,259 @@ +import sys +import uuid +from contextlib import asynccontextmanager +from logging import getLogger +from typing import ( + TYPE_CHECKING, + Any, + AsyncGenerator, + AsyncIterator, + Awaitable, + Callable, + Dict, + List, + Optional, + Tuple, + TypeVar, +) + +from taskiq import AckableMessage, AsyncResultBackend, BrokerMessage +from taskiq.abc.broker import AsyncBroker +from valkey import ResponseError +from valkey.asyncio import Sentinel, Valkey + +if sys.version_info >= (3, 10): + from typing import TypeAlias +else: + from typing_extensions import TypeAlias + +if TYPE_CHECKING: + _Valkey: TypeAlias = Valkey[bytes] # type: ignore +else: + _Valkey: TypeAlias = Valkey + +_T = TypeVar("_T") + +logger = getLogger("taskiq.valkey_sentinel_broker") + + +class BaseSentinelBroker(AsyncBroker): + """Base broker that works with Sentinel.""" + + def __init__( + self, + sentinels: List[Tuple[str, int]], + master_name: str, + result_backend: Optional[AsyncResultBackend[_T]] = None, + task_id_generator: Optional[Callable[[], str]] = None, + queue_name: str = "taskiq", + min_other_sentinels: int = 0, + sentinel_kwargs: Optional[Any] = None, + **connection_kwargs: Any, + ) -> None: + super().__init__( + result_backend=result_backend, + task_id_generator=task_id_generator, + ) + + self.sentinel = Sentinel( + sentinels=sentinels, + min_other_sentinels=min_other_sentinels, + 
sentinel_kwargs=sentinel_kwargs, + **connection_kwargs, + ) + self.master_name = master_name + self.queue_name = queue_name + + @asynccontextmanager + async def _acquire_master_conn(self) -> AsyncIterator[_Valkey]: + async with self.sentinel.master_for(self.master_name) as valkey_conn: + yield valkey_conn + + +class PubSubSentinelBroker(BaseSentinelBroker): + """Broker that works with Sentinel and broadcasts tasks to all workers.""" + + async def kick(self, message: BrokerMessage) -> None: + """ + Publish message over PUBSUB channel. + + :param message: message to send. + """ + queue_name = message.labels.get("queue_name") or self.queue_name + async with self._acquire_master_conn() as valkey_conn: + await valkey_conn.publish(queue_name, message.message) + + async def listen(self) -> AsyncGenerator[bytes, None]: + """ + Listen valkey queue for new messages. + + This function listens to the pubsub channel + and yields all messages with proper types. + + :yields: broker messages. + """ + async with self._acquire_master_conn() as valkey_conn: + valkey_pubsub_channel = valkey_conn.pubsub() + await valkey_pubsub_channel.subscribe(self.queue_name) + async for message in valkey_pubsub_channel.listen(): + if not message: + continue + if message["type"] != "message": + logger.debug("Received non-message from valkey: %s", message) + continue + yield message["data"] + + +class ListQueueSentinelBroker(BaseSentinelBroker): + """Broker that works with Sentinel and distributes tasks between workers.""" + + async def kick(self, message: BrokerMessage) -> None: + """ + Put a message in a list. + + This method appends a message to the list of all messages. + + :param message: message to append. + """ + queue_name = message.labels.get("queue_name") or self.queue_name + async with self._acquire_master_conn() as valkey_conn: + await valkey_conn.lpush(queue_name, message.message) # type: ignore + + async def listen(self) -> AsyncGenerator[bytes, None]: + """ + Listen valkey queue for new messages. + + This function listens to the queue + and yields new messages if they have BrokerMessage type. + + :yields: broker messages. + """ + valkey_brpop_data_position = 1 + async with self._acquire_master_conn() as valkey_conn: + while True: + yield (await valkey_conn.brpop(self.queue_name))[ # type: ignore + valkey_brpop_data_position + ] + + +class ValkeyStreamSentinelBroker(BaseSentinelBroker): + """ + Valkey broker that uses streams for task distribution. + + You can read more about streams here: + https://valkey.io/docs/latest/develop/data-types/streams + + This broker supports acknowledgment of messages. + """ + + def __init__( + self, + sentinels: List[Tuple[str, int]], + master_name: str, + min_other_sentinels: int = 0, + queue_name: str = "taskiq", + consumer_group_name: str = "taskiq", + consumer_name: Optional[str] = None, + consumer_id: str = "$", + mkstream: bool = True, + xread_block: int = 10000, + additional_streams: Optional[Dict[str, str]] = None, + **connection_kwargs: Any, + ) -> None: + """ + Constructs a new broker that uses streams. + + :param sentinels: list of nodes to connect to. + :param queue_name: name for a key with stream in valkey. + :param max_connection_pool_size: maximum number of connections in pool. + Each worker opens its own connection. Therefore this value has to be + at least number of workers + 1. + :param consumer_group_name: name for a consumer group. + Valkey will keep track of acked messages for this group. + :param consumer_name: name for a consumer. 
By default it is a random uuid. + :param consumer_id: id for a consumer. ID of a message to start reading from. + $ means start from the latest message. + :param mkstream: create stream if it does not exist. + :param xread_block: block time in ms for xreadgroup. + Better to set it to a bigger value, to avoid unnecessary calls. + :param additional_streams: additional streams to read from. + Each key is a stream name, value is a consumer id. + """ + super().__init__( + sentinels=sentinels, + master_name=master_name, + min_other_sentinels=min_other_sentinels, + task_id_generator=None, + result_backend=None, + queue_name=queue_name, + **connection_kwargs, + ) + self.consumer_group_name = consumer_group_name + self.consumer_name = consumer_name or str(uuid.uuid4()) + self.consumer_id = consumer_id + self.mkstream = mkstream + self.block = xread_block + self.additional_streams = additional_streams or {} + + async def _declare_consumer_group(self) -> None: + streams = {self.queue_name, *self.additional_streams.keys()} + async with self._acquire_master_conn() as valkey_conn: + for stream_name in streams: + try: + await valkey_conn.xgroup_create( + stream_name, + self.consumer_group_name, + id=self.consumer_id, + mkstream=self.mkstream, + ) + except ResponseError as err: + logger.debug(err) + + async def startup(self) -> None: + """Declare consumer group on startup.""" + await super().startup() + await self._declare_consumer_group() + + async def kick(self, message: BrokerMessage) -> None: + """ + Put a message in a list. + + This method appends a message to the list of all messages. + + :param message: message to append. + """ + async with self._acquire_master_conn() as valkey_conn: + await valkey_conn.xadd(self.queue_name, {b"data": message.message}) + + def _ack_generator(self, id: str) -> Callable[[], Awaitable[None]]: + async def _ack() -> None: + async with self._acquire_master_conn() as valkey_conn: + await valkey_conn.xack( + self.queue_name, + self.consumer_group_name, + id, + ) + + return _ack + + async def listen(self) -> AsyncGenerator[AckableMessage, None]: + """Listen to the stream for new messages.""" + async with self._acquire_master_conn() as valkey_conn: + while True: + fetched = await valkey_conn.xreadgroup( + self.consumer_group_name, + self.consumer_name, + { + self.queue_name: ">", + **self.additional_streams, # type: ignore + }, + block=self.block, + noack=False, + ) + for _, msg_list in fetched: + for msg_id, msg in msg_list: + logger.debug("Received message: %s", msg) + yield AckableMessage( + data=msg[b"data"], + ack=self._ack_generator(msg_id), + ) diff --git a/tests/__init__.py b/tests/__init__.py index e69de29..005c965 100644 --- a/tests/__init__.py +++ b/tests/__init__.py @@ -0,0 +1 @@ +"""Tests for the package.""" diff --git a/tests/conftest.py b/tests/conftest.py index 038776a..da83eaf 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,12 +1,62 @@ +import os +from typing import List, Tuple + import pytest -@pytest.fixture(scope="session") -def anyio_backend() -> str: +@pytest.fixture +def valkey_url() -> str: + """ + URL to connect to valkey. + + It tries to get it from environ, + and return default one if the variable is + not set. + + :return: URL string. + """ + return os.environ.get("TEST_VALKEY_URL", "valkey://localhost:7000") + + +@pytest.fixture +def valkey_cluster_url() -> str: + """ + URL to connect to valkey cluster. + + It tries to get it from environ, + and return default one if the variable is + not set. + + :return: URL string. 
""" - Anyio backend. + return os.environ.get("TEST_VALKEY_CLUSTER_URL", "valkey://localhost:7001") + + +@pytest.fixture +def valkey_sentinels() -> List[Tuple[str, int]]: + """ + List of valkey sentinel hosts. + + It tries to get it from environ, + and return default one if the variable is + not set. + + :return: list of host and port pairs. + """ + sentinels = os.environ.get("TEST_VALKEY_SENTINELS", "localhost:7002") + host, _, port = sentinels.partition(":") + return [(host, int(port))] + + +@pytest.fixture +def valkey_sentinel_master_name() -> str: + """ + Valkey sentinel master name. + + It tries to get it from environ, + and return default one if the variable is + not set. - Backend for anyio pytest plugin. - :return: backend name. + :return: valkey sentinel master name string. """ - return "asyncio" + return os.environ.get("TEST_VALKEY_SENTINEL_MASTER_NAME", "myprimary") diff --git a/tests/test_backend.py b/tests/test_backend.py new file mode 100644 index 0000000..21b95db --- /dev/null +++ b/tests/test_backend.py @@ -0,0 +1,268 @@ +import asyncio +import uuid +from typing import Any, TypeVar + +import pytest +from taskiq import TaskiqResult + +from taskiq_valkey import ValkeyAsyncResultBackend +from taskiq_valkey.exceptions import ( + DuplicateExpireTimeSelectedError, + ExpireTimeMustBeMoreThanZeroError, + ResultIsMissingError, +) + +_ReturnType = TypeVar("_ReturnType") + + +class ResultForTest: + """Just test class for testing.""" + + def __init__(self) -> None: + """Generates test class for result testing.""" + self.test_arg = uuid.uuid4() + + +@pytest.fixture +def task_id() -> str: + """ + Generates ID for taskiq result. + + :returns: uuid as string. + """ + return str(uuid.uuid4()) + + +@pytest.fixture +def default_taskiq_result() -> TaskiqResult[Any]: + """ + Generates default TaskiqResult. + + :returns: TaskiqResult with generic result. + """ + return TaskiqResult( + is_err=False, + log=None, + return_value="Best test ever.", + execution_time=0.1, + ) + + +@pytest.fixture +def custom_taskiq_result() -> TaskiqResult[Any]: + """ + Generates custom TaskiqResult. + + :returns: TaskiqResult with custom class result. + """ + return TaskiqResult( + is_err=False, + log=None, + return_value=ResultForTest(), + execution_time=0.1, + ) + + +async def test_success_backend_default_result( + default_taskiq_result: TaskiqResult[_ReturnType], + task_id: str, + valkey_url: str, +) -> None: + """ + Tests normal behavior with default result in TaskiqResult. + + :param default_taskiq_result: TaskiqResult with default result. + :param task_id: ID for task. + :param valkey_url: url to valkey. + """ + backend: ValkeyAsyncResultBackend[_ReturnType] = ValkeyAsyncResultBackend( + valkey_url, + ) + await backend.set_result( + task_id=task_id, + result=default_taskiq_result, + ) + result = await backend.get_result(task_id=task_id) + + assert result == default_taskiq_result + await backend.shutdown() + + +async def test_error_backend_custom_result( + custom_taskiq_result: TaskiqResult[_ReturnType], + task_id: str, + valkey_url: str, +) -> None: + """ + Tests normal behavior with custom result in TaskiqResult. + + Setting custom class as a result should raise an error. + + :param custom_taskiq_result: TaskiqResult with custom result. + :param task_id: ID for task. + :param valkey_url: url to valkey. 
+ """ + backend: ValkeyAsyncResultBackend[_ReturnType] = ValkeyAsyncResultBackend( + valkey_url, + ) + with pytest.raises(ValueError): + await backend.set_result( + task_id=task_id, + result=custom_taskiq_result, + ) + + await backend.shutdown() + + +async def test_cant_specify_ex_and_px_params( + valkey_url: str, +) -> None: + """ + Tests the impossibility of specifying this and this at the same time. + + :param valkey_url: url to valkey. + """ + with pytest.raises(DuplicateExpireTimeSelectedError): + ValkeyAsyncResultBackend(valkey_url, result_ex_time=1, result_px_time=1) + + +@pytest.mark.parametrize( + "ex_time, px_time", + [(0, 0), (-500, 0), (0, -500), (-500, -500)], +) +async def test_ex_or_px_must_be_more_than_zero( + ex_time: int, + px_time: int, + valkey_url: str, +) -> None: + """ + Tests that at least ex or px params must be specified. + + :param valkey_url: url to valkey. + """ + with pytest.raises(ExpireTimeMustBeMoreThanZeroError): + ValkeyAsyncResultBackend( + valkey_url, + result_ex_time=ex_time, + result_px_time=px_time, + ) + + +async def test_success_backend_expire_ex_param( + default_taskiq_result: TaskiqResult[_ReturnType], + task_id: str, + valkey_url: str, +) -> None: + """ + Tests ex param. + + Here we test normal behavior, so we get result before expire time. + + :param default_taskiq_result: TaskiqResult with default result. + :param task_id: ID for task. + :param valkey_url: url to valkey. + """ + backend: ValkeyAsyncResultBackend[_ReturnType] = ValkeyAsyncResultBackend( + valkey_url, + result_ex_time=1, + ) + await backend.set_result( + task_id=task_id, + result=default_taskiq_result, + ) + await asyncio.sleep(0.5) + + result = await backend.get_result(task_id=task_id) + + assert result == default_taskiq_result + await backend.shutdown() + + +async def test_unsuccess_backend_expire_ex_param( + default_taskiq_result: TaskiqResult[_ReturnType], + task_id: str, + valkey_url: str, +) -> None: + """ + Tests ex param. + + Here we test bad behavior, so we can't get result + because expire time is over. + + :param default_taskiq_result: TaskiqResult with default result. + :param task_id: ID for task. + :param valkey_url: url to valkey. + """ + backend: ValkeyAsyncResultBackend[_ReturnType] = ValkeyAsyncResultBackend( + valkey_url, + result_ex_time=1, + ) + await backend.set_result( + task_id=task_id, + result=default_taskiq_result, + ) + await asyncio.sleep(1.1) + + with pytest.raises(ResultIsMissingError): + await backend.get_result(task_id=task_id) + await backend.shutdown() + + +async def test_success_backend_expire_px_param( + default_taskiq_result: TaskiqResult[_ReturnType], + task_id: str, + valkey_url: str, +) -> None: + """ + Tests px param. + + Here we test normal behavior, so we get result before expire time. + + :param default_taskiq_result: TaskiqResult with default result. + :param task_id: ID for task. + :param valkey_url: url to valkey. + """ + backend: ValkeyAsyncResultBackend[_ReturnType] = ValkeyAsyncResultBackend( + valkey_url, + result_px_time=1000, + ) + await backend.set_result( + task_id=task_id, + result=default_taskiq_result, + ) + await asyncio.sleep(0.5) + + result = await backend.get_result(task_id=task_id) + + assert result == default_taskiq_result + await backend.shutdown() + + +async def test_unsuccess_backend_expire_px_param( + default_taskiq_result: TaskiqResult[_ReturnType], + task_id: str, + valkey_url: str, +) -> None: + """ + Tests px param. + + Here we test bad behavior, so we can't get result + because expire time is over. 
+ + :param default_taskiq_result: TaskiqResult with default result. + :param task_id: ID for task. + :param valkey_url: url to valkey. + """ + backend: ValkeyAsyncResultBackend[_ReturnType] = ValkeyAsyncResultBackend( + valkey_url, + result_px_time=1000, + ) + await backend.set_result( + task_id=task_id, + result=default_taskiq_result, + ) + await asyncio.sleep(1.1) + + with pytest.raises(ResultIsMissingError): + await backend.get_result(task_id=task_id) + await backend.shutdown() diff --git a/tests/test_broker.py b/tests/test_broker.py new file mode 100644 index 0000000..495b79b --- /dev/null +++ b/tests/test_broker.py @@ -0,0 +1,281 @@ +import asyncio +import uuid +from typing import List, Tuple, Union + +import pytest +from taskiq import AckableMessage, AsyncBroker, BrokerMessage + +from taskiq_valkey import ( + ListQueueClusterBroker, + ListQueueSentinelBroker, + PubSubBroker, + PubSubSentinelBroker, + ValkeyStreamClusterBroker, + ValkeyStreamSentinelBroker, +) +from taskiq_valkey.valkey_broker import ValkeyStreamBroker + + +def test_no_url_should_raise_typeerror() -> None: + """Test that url is expected.""" + with pytest.raises(TypeError): + PubSubBroker() # type: ignore + + +async def get_message( + broker: AsyncBroker, +) -> Union[bytes, AckableMessage]: + """ + Get a message from the broker. + + :param broker: async message broker. + :return: first message from listen method. + """ + async for message in broker.listen(): + return message + return b"" + + +@pytest.fixture +def valid_broker_message() -> BrokerMessage: + """ + Generate valid broker message for tests. + + :returns: broker message. + """ + return BrokerMessage( + task_id=uuid.uuid4().hex, + task_name=uuid.uuid4().hex, + message=b"my_msg", + labels={ + "label1": "val1", + }, + ) + + +async def test_pub_sub_broker( + valid_broker_message: BrokerMessage, + valkey_url: str, +) -> None: + """ + Test that messages are published and read correctly by PubSubBroker. + + We create two workers that listen and send a message to them. + Expect both workers to receive the same message we sent. + """ + broker = PubSubBroker(url=valkey_url, queue_name=uuid.uuid4().hex) + worker1_task = asyncio.create_task(get_message(broker)) + worker2_task = asyncio.create_task(get_message(broker)) + await asyncio.sleep(0.3) + + await broker.kick(valid_broker_message) + await asyncio.sleep(0.3) + + message1 = worker1_task.result() + message2 = worker2_task.result() + assert message1 == valid_broker_message.message + assert message1 == message2 + await broker.shutdown() + + +async def test_pub_sub_broker_max_connections( + valid_broker_message: BrokerMessage, + valkey_url: str, +) -> None: + """Test PubSubBroker with connection limit set.""" + broker = PubSubBroker( + url=valkey_url, + queue_name=uuid.uuid4().hex, + max_connection_pool_size=4, + timeout=1, + ) + worker_tasks = [asyncio.create_task(get_message(broker)) for _ in range(3)] + await asyncio.sleep(0.3) + + await asyncio.gather(*[broker.kick(valid_broker_message) for _ in range(50)]) + await asyncio.sleep(0.3) + + for worker in worker_tasks: + worker.cancel() + await broker.shutdown() + + +async def test_stream_broker( + valid_broker_message: BrokerMessage, + valkey_url: str, +) -> None: + """ + Test that messages are published and read correctly by ListQueueBroker. + + We create two workers that listen and send a message to them. + Expect only one worker to receive the same message we sent. 
+ """ + broker = ValkeyStreamBroker( + url=valkey_url, + queue_name=uuid.uuid4().hex, + consumer_group_name=uuid.uuid4().hex, + ) + await broker.startup() + + worker1_task = asyncio.create_task(get_message(broker)) + worker2_task = asyncio.create_task(get_message(broker)) + + await broker.kick(valid_broker_message) + + await asyncio.wait( + [worker1_task, worker2_task], + return_when=asyncio.FIRST_COMPLETED, + ) + + assert worker1_task.done() != worker2_task.done() + message = worker1_task.result() if worker1_task.done() else worker2_task.result() + assert isinstance(message, AckableMessage) + assert message.data == valid_broker_message.message + await message.ack() # type: ignore + worker1_task.cancel() + worker2_task.cancel() + await broker.shutdown() + + +async def test_list_queue_cluster_broker( + valid_broker_message: BrokerMessage, + valkey_cluster_url: str, +) -> None: + """ + Test that messages are published and read correctly by ListQueueClusterBroker. + + We create two workers that listen and send a message to them. + Expect only one worker to receive the same message we sent. + """ + broker = ListQueueClusterBroker( + url=valkey_cluster_url, + queue_name=uuid.uuid4().hex, + ) + worker_task = asyncio.create_task(get_message(broker)) + await asyncio.sleep(0.3) + + await broker.kick(valid_broker_message) + await asyncio.sleep(0.3) + + assert worker_task.done() + assert worker_task.result() == valid_broker_message.message + worker_task.cancel() + await broker.shutdown() + + +async def test_stream_cluster_broker( + valid_broker_message: BrokerMessage, + valkey_cluster_url: str, +) -> None: + """ + Test that messages are published and read correctly by ListQueueClusterBroker. + + We create two workers that listen and send a message to them. + Expect only one worker to receive the same message we sent. + """ + broker = ValkeyStreamClusterBroker( + url=valkey_cluster_url, + queue_name=uuid.uuid4().hex, + consumer_group_name=uuid.uuid4().hex, + ) + await broker.startup() + + worker_task = asyncio.create_task(get_message(broker)) + + await broker.kick(valid_broker_message) + + result = await worker_task + + assert isinstance(result, AckableMessage) + assert result.data == valid_broker_message.message + await result.ack() # type: ignore + worker_task.cancel() + await broker.shutdown() + + +async def test_pub_sub_sentinel_broker( + valid_broker_message: BrokerMessage, + valkey_sentinels: List[Tuple[str, int]], + valkey_sentinel_master_name: str, +) -> None: + """ + Test that messages are published and read correctly by PubSubSentinelBroker. + + We create two workers that listen and send a message to them. + Expect both workers to receive the same message we sent. + """ + broker = PubSubSentinelBroker( + sentinels=valkey_sentinels, + master_name=valkey_sentinel_master_name, + queue_name=uuid.uuid4().hex, + ) + worker1_task = asyncio.create_task(get_message(broker)) + worker2_task = asyncio.create_task(get_message(broker)) + await asyncio.sleep(0.3) + + await broker.kick(valid_broker_message) + await asyncio.sleep(0.3) + + message1 = worker1_task.result() + message2 = worker2_task.result() + assert message1 == valid_broker_message.message + assert message1 == message2 + await broker.shutdown() + + +async def test_list_queue_sentinel_broker( + valid_broker_message: BrokerMessage, + valkey_sentinels: List[Tuple[str, int]], + valkey_sentinel_master_name: str, +) -> None: + """ + Test that messages are published and read correctly by ListQueueSentinelBroker. 
+
+    We create a single worker and send a message to it.
+    Expect the worker to receive the message we sent.
+    """
+    broker = ListQueueSentinelBroker(
+        sentinels=valkey_sentinels,
+        master_name=valkey_sentinel_master_name,
+        queue_name=uuid.uuid4().hex,
+    )
+    worker_task = asyncio.create_task(get_message(broker))
+    await asyncio.sleep(0.3)
+
+    await broker.kick(valid_broker_message)
+    await asyncio.sleep(0.3)
+
+    assert worker_task.done()
+    assert worker_task.result() == valid_broker_message.message
+    worker_task.cancel()
+    await broker.shutdown()
+
+
+async def test_streams_sentinel_broker(
+    valid_broker_message: BrokerMessage,
+    valkey_sentinels: List[Tuple[str, int]],
+    valkey_sentinel_master_name: str,
+) -> None:
+    """
+    Test that messages are published and read correctly by ValkeyStreamSentinelBroker.
+
+    We create a single worker and send a message to it.
+    Expect the worker to receive and acknowledge the message we sent.
+    """
+    broker = ValkeyStreamSentinelBroker(
+        sentinels=valkey_sentinels,
+        master_name=valkey_sentinel_master_name,
+        queue_name=uuid.uuid4().hex,
+        consumer_group_name=uuid.uuid4().hex,
+    )
+    await broker.startup()
+    worker_task = asyncio.create_task(get_message(broker))
+
+    await broker.kick(valid_broker_message)
+
+    result = await worker_task
+    assert isinstance(result, AckableMessage)
+    assert result.data == valid_broker_message.message
+    await result.ack()  # type: ignore
+    worker_task.cancel()
+    await broker.shutdown()
diff --git a/tests/test_list_schedule_source.py b/tests/test_list_schedule_source.py
new file mode 100644
index 0000000..76316d6
--- /dev/null
+++ b/tests/test_list_schedule_source.py
@@ -0,0 +1,96 @@
+import datetime
+import uuid
+
+from freezegun import freeze_time
+from taskiq import ScheduledTask
+
+from taskiq_valkey.list_schedule_source import ListValkeyScheduleSource
+
+
+@freeze_time("2025-01-01 00:00:00")
+async def test_schedule_cron(valkey_url: str) -> None:
+    """Test adding a cron schedule."""
+    prefix = uuid.uuid4().hex
+    source = ListValkeyScheduleSource(valkey_url, prefix=prefix)
+    schedule = ScheduledTask(
+        task_name="test_task",
+        labels={},
+        args=[],
+        kwargs={},
+        cron="* * * * *",
+    )
+    await source.add_schedule(schedule)
+    schedules = await source.get_schedules()
+    assert schedules == [schedule]
+
+
+@freeze_time("2025-01-01 00:00:00")
+async def test_schedule_from_past(valkey_url: str) -> None:
+    """Test that past schedules are returned once and then ignored."""
+    prefix = uuid.uuid4().hex
+    source = ListValkeyScheduleSource(valkey_url, prefix=prefix)
+    schedule = ScheduledTask(
+        task_name="test_task",
+        labels={},
+        args=[],
+        kwargs={},
+        time=datetime.datetime.now(datetime.timezone.utc)
+        - datetime.timedelta(minutes=4),
+    )
+    await source.add_schedule(schedule)
+    # When running for the first time, the scheduler will get all the
+    # schedules that are in the past.
+    schedules = await source.get_schedules()
+    assert schedules == [schedule]
+    for schedule in schedules:
+        await source.post_send(schedule)
+    # After getting the schedules for the second time,
+    # all the schedules in the past are ignored.
+    schedules = await source.get_schedules()
+    assert schedules == []
+
+
+@freeze_time("2025-01-01 00:00:00")
+async def test_schedule_removal(valkey_url: str) -> None:
+    """Test that a future schedule is only returned once it is due."""
+    prefix = uuid.uuid4().hex
+    source = ListValkeyScheduleSource(valkey_url, prefix=prefix)
+    schedule = ScheduledTask(
+        task_name="test_task",
+        labels={},
+        args=[],
+        kwargs={},
+        time=datetime.datetime.now(datetime.timezone.utc)
+        + datetime.timedelta(minutes=4),
+    )
+    await source.add_schedule(schedule)
+    # The schedule is set 4 minutes in the future,
+    # so it must not be returned yet.
+    schedules = await source.get_schedules()
+    assert schedules == []
+    # Assert that we will get the schedule after the time has passed.
+    with freeze_time("2025-01-01 00:04:00"):
+        schedules = await source.get_schedules()
+    assert schedules == [schedule]
+
+
+@freeze_time("2025-01-01 00:00:00")
+async def test_deletion(valkey_url: str) -> None:
+    """Test deleting a schedule."""
+    prefix = uuid.uuid4().hex
+    source = ListValkeyScheduleSource(valkey_url, prefix=prefix)
+    schedule = ScheduledTask(
+        task_name="test_task",
+        labels={},
+        args=[],
+        kwargs={},
+        time=datetime.datetime.now(datetime.timezone.utc),
+    )
+    await source.add_schedule(schedule)
+    # The schedule is due right now,
+    # so it is returned immediately.
+    schedules = await source.get_schedules()
+    assert schedules == [schedule]
+    await source.delete_schedule(schedule.schedule_id)
+    schedules = await source.get_schedules()
+    assert schedules == []
diff --git a/tests/test_result_backend.py b/tests/test_result_backend.py
new file mode 100644
index 0000000..d6ed807
--- /dev/null
+++ b/tests/test_result_backend.py
@@ -0,0 +1,546 @@
+import asyncio
+import uuid
+from typing import List, Tuple
+
+import pytest
+from taskiq import TaskiqResult
+from taskiq.depends.progress_tracker import TaskProgress, TaskState
+
+from taskiq_valkey import (
+    ValkeyAsyncClusterResultBackend,
+    ValkeyAsyncResultBackend,
+    ValkeyAsyncSentinelResultBackend,
+)
+from taskiq_valkey.exceptions import ResultIsMissingError
+
+
+async def test_set_result_success(valkey_url: str) -> None:
+    """
+    Tests that results can be set without errors.
+
+    :param valkey_url: valkey URL.
+    """
+    result_backend = ValkeyAsyncResultBackend(  # type: ignore
+        valkey_url=valkey_url,
+    )
+    task_id = uuid.uuid4().hex
+    result: "TaskiqResult[int]" = TaskiqResult(
+        is_err=True,
+        log="My Log",
+        return_value=11,
+        execution_time=112.2,
+    )
+    await result_backend.set_result(
+        task_id=task_id,
+        result=result,
+    )
+
+    fetched_result = await result_backend.get_result(
+        task_id=task_id,
+        with_logs=True,
+    )
+    assert fetched_result.log == "My Log"
+    assert fetched_result.return_value == 11
+    assert fetched_result.execution_time == 112.2
+    assert fetched_result.is_err
+    await result_backend.shutdown()
+
+
+async def test_fetch_without_logs(valkey_url: str) -> None:
+    """
+    Check if fetching value without logs works fine.
+
+    :param valkey_url: valkey URL.
+ """ + result_backend = ValkeyAsyncResultBackend( # type: ignore + valkey_url=valkey_url, + ) + task_id = uuid.uuid4().hex + result: "TaskiqResult[int]" = TaskiqResult( + is_err=True, + log="My Log", + return_value=11, + execution_time=112.2, + ) + await result_backend.set_result( + task_id=task_id, + result=result, + ) + + fetched_result = await result_backend.get_result( + task_id=task_id, + with_logs=False, + ) + assert fetched_result.log is None + assert fetched_result.return_value == 11 + assert fetched_result.execution_time == 112.2 + assert fetched_result.is_err + await result_backend.shutdown() + + +async def test_remove_results_after_reading(valkey_url: str) -> None: + """ + Check if removing results after reading works fine. + + :param valkey_url: valkey URL. + """ + result_backend = ValkeyAsyncResultBackend( # type: ignore + valkey_url=valkey_url, + keep_results=False, + ) + task_id = uuid.uuid4().hex + result: "TaskiqResult[int]" = TaskiqResult( + is_err=True, + log="My Log", + return_value=11, + execution_time=112.2, + ) + await result_backend.set_result( + task_id=task_id, + result=result, + ) + + await result_backend.get_result(task_id=task_id) + with pytest.raises(ResultIsMissingError): + await result_backend.get_result(task_id=task_id) + + await result_backend.shutdown() + + +async def test_keep_results_after_reading(valkey_url: str) -> None: + """ + Check if keeping results after reading works fine. + + :param valkey_url: valkey URL. + """ + result_backend = ValkeyAsyncResultBackend( # type: ignore + valkey_url=valkey_url, + keep_results=True, + ) + task_id = uuid.uuid4().hex + result: "TaskiqResult[int]" = TaskiqResult( + is_err=True, + log="My Log", + return_value=11, + execution_time=112.2, + ) + await result_backend.set_result( + task_id=task_id, + result=result, + ) + + res1 = await result_backend.get_result(task_id=task_id) + res2 = await result_backend.get_result(task_id=task_id) + assert res1 == res2 + await result_backend.shutdown() + + +async def test_set_result_max_connections(valkey_url: str) -> None: + """ + Tests that asynchronous backend works with connection limit. + + :param valkey_url: valkey URL. + """ + result_backend = ValkeyAsyncResultBackend( # type: ignore + valkey_url=valkey_url, + max_connection_pool_size=1, + timeout=3, + ) + + task_id = uuid.uuid4().hex + result: "TaskiqResult[int]" = TaskiqResult( + is_err=True, + log="My Log", + return_value=11, + execution_time=112.2, + ) + await result_backend.set_result( + task_id=task_id, + result=result, + ) + + async def get_result() -> None: + await result_backend.get_result(task_id=task_id, with_logs=True) + + await asyncio.gather(*[get_result() for _ in range(10)]) + await result_backend.shutdown() + + +async def test_set_result_success_cluster(valkey_cluster_url: str) -> None: + """ + Tests that results can be set without errors in cluster mode. + + :param valkey_url: valkey URL. 
+ """ + result_backend = ValkeyAsyncClusterResultBackend( # type: ignore + valkey_url=valkey_cluster_url, + ) + task_id = uuid.uuid4().hex + result: "TaskiqResult[int]" = TaskiqResult( + is_err=True, + log="My Log", + return_value=11, + execution_time=112.2, + ) + await result_backend.set_result( + task_id=task_id, + result=result, + ) + + fetched_result = await result_backend.get_result( + task_id=task_id, + with_logs=True, + ) + assert fetched_result.log == "My Log" + assert fetched_result.return_value == 11 + assert fetched_result.execution_time == 112.2 + assert fetched_result.is_err + await result_backend.shutdown() + + +async def test_fetch_without_logs_cluster(valkey_cluster_url: str) -> None: + """ + Check if fetching value without logs works fine. + + :param valkey_url: valkey URL. + """ + result_backend = ValkeyAsyncClusterResultBackend( # type: ignore + valkey_url=valkey_cluster_url, + ) + task_id = uuid.uuid4().hex + result: "TaskiqResult[int]" = TaskiqResult( + is_err=True, + log="My Log", + return_value=11, + execution_time=112.2, + ) + await result_backend.set_result( + task_id=task_id, + result=result, + ) + + fetched_result = await result_backend.get_result( + task_id=task_id, + with_logs=False, + ) + assert fetched_result.log is None + assert fetched_result.return_value == 11 + assert fetched_result.execution_time == 112.2 + assert fetched_result.is_err + await result_backend.shutdown() + + +async def test_remove_results_after_reading_cluster(valkey_cluster_url: str) -> None: + """ + Check if removing results after reading works fine. + + :param valkey_url: valkey URL. + """ + result_backend = ValkeyAsyncClusterResultBackend( # type: ignore + valkey_url=valkey_cluster_url, + keep_results=False, + ) + task_id = uuid.uuid4().hex + result: "TaskiqResult[int]" = TaskiqResult( + is_err=True, + log="My Log", + return_value=11, + execution_time=112.2, + ) + await result_backend.set_result( + task_id=task_id, + result=result, + ) + + await result_backend.get_result(task_id=task_id) + with pytest.raises(ResultIsMissingError): + await result_backend.get_result(task_id=task_id) + + await result_backend.shutdown() + + +async def test_keep_results_after_reading_cluster(valkey_cluster_url: str) -> None: + """ + Check if keeping results after reading works fine. + + :param valkey_url: valkey URL. + """ + result_backend = ValkeyAsyncClusterResultBackend( # type: ignore + valkey_url=valkey_cluster_url, + keep_results=True, + ) + task_id = uuid.uuid4().hex + result: "TaskiqResult[int]" = TaskiqResult( + is_err=True, + log="My Log", + return_value=11, + execution_time=112.2, + ) + await result_backend.set_result( + task_id=task_id, + result=result, + ) + + res1 = await result_backend.get_result(task_id=task_id) + res2 = await result_backend.get_result(task_id=task_id) + assert res1 == res2 + await result_backend.shutdown() + + +async def test_set_result_success_sentinel( + valkey_sentinels: List[Tuple[str, int]], + valkey_sentinel_master_name: str, +) -> None: + """ + Tests that results can be set without errors in cluster mode. + + :param valkey_sentinels: list of host and port pairs. + :param valkey_sentinel_master_name: valkey sentinel master name string. 
+ """ + result_backend = ValkeyAsyncSentinelResultBackend( # type: ignore + sentinels=valkey_sentinels, + master_name=valkey_sentinel_master_name, + ) + task_id = uuid.uuid4().hex + result: "TaskiqResult[int]" = TaskiqResult( + is_err=True, + log="My Log", + return_value=11, + execution_time=112.2, + ) + await result_backend.set_result( + task_id=task_id, + result=result, + ) + + fetched_result = await result_backend.get_result( + task_id=task_id, + with_logs=True, + ) + assert fetched_result.log == "My Log" + assert fetched_result.return_value == 11 + assert fetched_result.execution_time == 112.2 + assert fetched_result.is_err + await result_backend.shutdown() + + +async def test_fetch_without_logs_sentinel( + valkey_sentinels: List[Tuple[str, int]], + valkey_sentinel_master_name: str, +) -> None: + """ + Check if fetching value without logs works fine. + + :param valkey_sentinels: list of host and port pairs. + :param valkey_sentinel_master_name: valkey sentinel master name string. + """ + result_backend = ValkeyAsyncSentinelResultBackend( # type: ignore + sentinels=valkey_sentinels, + master_name=valkey_sentinel_master_name, + ) + task_id = uuid.uuid4().hex + result: "TaskiqResult[int]" = TaskiqResult( + is_err=True, + log="My Log", + return_value=11, + execution_time=112.2, + ) + await result_backend.set_result( + task_id=task_id, + result=result, + ) + + fetched_result = await result_backend.get_result( + task_id=task_id, + with_logs=False, + ) + assert fetched_result.log is None + assert fetched_result.return_value == 11 + assert fetched_result.execution_time == 112.2 + assert fetched_result.is_err + await result_backend.shutdown() + + +async def test_remove_results_after_reading_sentinel( + valkey_sentinels: List[Tuple[str, int]], + valkey_sentinel_master_name: str, +) -> None: + """ + Check if removing results after reading works fine. + + :param valkey_sentinels: list of host and port pairs. + :param valkey_sentinel_master_name: valkey sentinel master name string. + """ + result_backend = ValkeyAsyncSentinelResultBackend( # type: ignore + sentinels=valkey_sentinels, + master_name=valkey_sentinel_master_name, + keep_results=False, + ) + task_id = uuid.uuid4().hex + result: "TaskiqResult[int]" = TaskiqResult( + is_err=True, + log="My Log", + return_value=11, + execution_time=112.2, + ) + await result_backend.set_result( + task_id=task_id, + result=result, + ) + + await result_backend.get_result(task_id=task_id) + with pytest.raises(ResultIsMissingError): + await result_backend.get_result(task_id=task_id) + + await result_backend.shutdown() + + +async def test_keep_results_after_reading_sentinel( + valkey_sentinels: List[Tuple[str, int]], + valkey_sentinel_master_name: str, +) -> None: + """ + Check if keeping results after reading works fine. + + :param valkey_sentinels: list of host and port pairs. + :param valkey_sentinel_master_name: valkey sentinel master name string. 
+    """
+    result_backend = ValkeyAsyncSentinelResultBackend(  # type: ignore
+        sentinels=valkey_sentinels,
+        master_name=valkey_sentinel_master_name,
+        keep_results=True,
+    )
+    task_id = uuid.uuid4().hex
+    result: "TaskiqResult[int]" = TaskiqResult(
+        is_err=True,
+        log="My Log",
+        return_value=11,
+        execution_time=112.2,
+    )
+    await result_backend.set_result(
+        task_id=task_id,
+        result=result,
+    )
+
+    res1 = await result_backend.get_result(task_id=task_id)
+    res2 = await result_backend.get_result(task_id=task_id)
+    assert res1 == res2
+    await result_backend.shutdown()
+
+
+async def test_set_progress(valkey_url: str) -> None:
+    """
+    Test that set_progress/get_progress works.
+
+    :param valkey_url: valkey URL.
+    """
+    result_backend = ValkeyAsyncResultBackend(  # type: ignore
+        valkey_url=valkey_url,
+    )
+    task_id = uuid.uuid4().hex
+
+    test_progress_1 = TaskProgress(
+        state=TaskState.STARTED,
+        meta={"message": "quarter way", "pct": 25},
+    )
+    test_progress_2 = TaskProgress(
+        state=TaskState.STARTED,
+        meta={"message": "half way", "pct": 50},
+    )
+
+    # Progress starts as None
+    assert await result_backend.get_progress(task_id=task_id) is None
+
+    # Setting the first time persists
+    await result_backend.set_progress(task_id=task_id, progress=test_progress_1)
+
+    fetched_result = await result_backend.get_progress(task_id=task_id)
+    assert fetched_result == test_progress_1
+
+    # Setting the second time replaces the first
+    await result_backend.set_progress(task_id=task_id, progress=test_progress_2)
+
+    fetched_result = await result_backend.get_progress(task_id=task_id)
+    assert fetched_result == test_progress_2
+
+    await result_backend.shutdown()
+
+
+async def test_set_progress_cluster(valkey_cluster_url: str) -> None:
+    """
+    Test that set_progress/get_progress works in cluster mode.
+
+    :param valkey_cluster_url: valkey cluster URL.
+    """
+    result_backend = ValkeyAsyncClusterResultBackend(  # type: ignore
+        valkey_url=valkey_cluster_url,
+    )
+    task_id = uuid.uuid4().hex
+
+    test_progress_1 = TaskProgress(
+        state=TaskState.STARTED,
+        meta={"message": "quarter way", "pct": 25},
+    )
+    test_progress_2 = TaskProgress(
+        state=TaskState.STARTED,
+        meta={"message": "half way", "pct": 50},
+    )
+
+    # Progress starts as None
+    assert await result_backend.get_progress(task_id=task_id) is None
+
+    # Setting the first time persists
+    await result_backend.set_progress(task_id=task_id, progress=test_progress_1)
+
+    fetched_result = await result_backend.get_progress(task_id=task_id)
+    assert fetched_result == test_progress_1
+
+    # Setting the second time replaces the first
+    await result_backend.set_progress(task_id=task_id, progress=test_progress_2)
+
+    fetched_result = await result_backend.get_progress(task_id=task_id)
+    assert fetched_result == test_progress_2
+
+    await result_backend.shutdown()
+
+
+async def test_set_progress_sentinel(
+    valkey_sentinels: List[Tuple[str, int]],
+    valkey_sentinel_master_name: str,
+) -> None:
+    """
+    Test that set_progress/get_progress works in sentinel mode.
+
+    :param valkey_sentinels: list of host and port pairs.
+    :param valkey_sentinel_master_name: valkey sentinel master name string.
+ """ + result_backend = ValkeyAsyncSentinelResultBackend( # type: ignore + sentinels=valkey_sentinels, + master_name=valkey_sentinel_master_name, + ) + task_id = uuid.uuid4().hex + + test_progress_1 = TaskProgress( + state=TaskState.STARTED, + meta={"message": "quarter way", "pct": 25}, + ) + test_progress_2 = TaskProgress( + state=TaskState.STARTED, + meta={"message": "half way", "pct": 50}, + ) + + # Progress starts as None + assert await result_backend.get_progress(task_id=task_id) is None + + # Setting the first time persists + await result_backend.set_progress(task_id=task_id, progress=test_progress_1) + + fetched_result = await result_backend.get_progress(task_id=task_id) + assert fetched_result == test_progress_1 + + # Setting the second time replaces the first + await result_backend.set_progress(task_id=task_id, progress=test_progress_2) + + fetched_result = await result_backend.get_progress(task_id=task_id) + assert fetched_result == test_progress_2 + + await result_backend.shutdown() diff --git a/tests/test_schedule_source.py b/tests/test_schedule_source.py new file mode 100644 index 0000000..a6d32a5 --- /dev/null +++ b/tests/test_schedule_source.py @@ -0,0 +1,361 @@ +import asyncio +import datetime as dt +import uuid +from typing import List, Tuple + +from taskiq import ScheduledTask + +from taskiq_valkey import ( + ListValkeyScheduleSource, + ValkeyClusterScheduleSource, + ValkeySentinelScheduleSource, +) + + +async def test_set_schedule(valkey_url: str) -> None: + prefix = uuid.uuid4().hex + source = ListValkeyScheduleSource(valkey_url, prefix=prefix) + schedule = ScheduledTask( + task_name="test_task", + labels={}, + args=[], + kwargs={}, + cron="* * * * *", + ) + await source.add_schedule(schedule) + schedules = await source.get_schedules() + assert schedules == [schedule] + await source.shutdown() + + +async def test_delete_schedule(valkey_url: str) -> None: + prefix = uuid.uuid4().hex + source = ListValkeyScheduleSource(valkey_url, prefix=prefix) + schedule = ScheduledTask( + task_name="test_task", + labels={}, + args=[], + kwargs={}, + cron="* * * * *", + ) + await source.add_schedule(schedule) + schedules = await source.get_schedules() + assert schedules == [schedule] + await source.delete_schedule(schedule.schedule_id) + schedules = await source.get_schedules() + # Schedules are empty. 
+ assert not schedules + await source.shutdown() + + +async def test_post_run_cron(valkey_url: str) -> None: + prefix = uuid.uuid4().hex + source = ListValkeyScheduleSource(valkey_url, prefix=prefix) + schedule = ScheduledTask( + task_name="test_task", + labels={}, + args=[], + kwargs={}, + cron="* * * * *", + ) + await source.add_schedule(schedule) + assert await source.get_schedules() == [schedule] + await source.post_send(schedule) + assert await source.get_schedules() == [schedule] + await source.shutdown() + + +async def test_post_run_time(valkey_url: str) -> None: + prefix = uuid.uuid4().hex + source = ListValkeyScheduleSource(valkey_url, prefix=prefix) + schedule = ScheduledTask( + task_name="test_task", + labels={}, + args=[], + kwargs={}, + time=dt.datetime(2000, 1, 1), + ) + await source.add_schedule(schedule) + assert await source.get_schedules() == [schedule] + await source.post_send(schedule) + assert await source.get_schedules() == [] + await source.shutdown() + + +async def test_buffer(valkey_url: str) -> None: + prefix = uuid.uuid4().hex + source = ListValkeyScheduleSource(valkey_url, prefix=prefix, buffer_size=1) + schedule1 = ScheduledTask( + task_name="test_task1", + labels={}, + args=[], + kwargs={}, + cron="* * * * *", + ) + schedule2 = ScheduledTask( + task_name="test_task2", + labels={}, + args=[], + kwargs={}, + cron="* * * * *", + ) + await source.add_schedule(schedule1) + await source.add_schedule(schedule2) + schedules = await source.get_schedules() + assert len(schedules) == 2 + assert schedule1 in schedules + assert schedule2 in schedules + await source.shutdown() + + +async def test_max_connections(valkey_url: str) -> None: + prefix = uuid.uuid4().hex + source = ListValkeyScheduleSource( + valkey_url, + prefix=prefix, + max_connection_pool_size=1, + timeout=3, + ) + await asyncio.gather(*[source.get_schedules() for _ in range(10)]) + + +async def test_cluster_set_schedule(valkey_cluster_url: str) -> None: + prefix = uuid.uuid4().hex + source = ValkeyClusterScheduleSource(valkey_cluster_url, prefix=prefix) + schedule = ScheduledTask( + task_name="test_task", + labels={}, + args=[], + kwargs={}, + cron="* * * * *", + ) + await source.add_schedule(schedule) + schedules = await source.get_schedules() + assert schedules == [schedule] + await source.shutdown() + + +async def test_cluster_delete_schedule(valkey_cluster_url: str) -> None: + prefix = uuid.uuid4().hex + source = ValkeyClusterScheduleSource(valkey_cluster_url, prefix=prefix) + schedule = ScheduledTask( + task_name="test_task", + labels={}, + args=[], + kwargs={}, + cron="* * * * *", + ) + await source.add_schedule(schedule) + schedules = await source.get_schedules() + assert schedules == [schedule] + await source.delete_schedule(schedule.schedule_id) + schedules = await source.get_schedules() + # Schedules are empty. 
+ assert not schedules + await source.shutdown() + + +async def test_cluster_post_run_cron(valkey_cluster_url: str) -> None: + prefix = uuid.uuid4().hex + source = ValkeyClusterScheduleSource(valkey_cluster_url, prefix=prefix) + schedule = ScheduledTask( + task_name="test_task", + labels={}, + args=[], + kwargs={}, + cron="* * * * *", + ) + await source.add_schedule(schedule) + assert await source.get_schedules() == [schedule] + await source.post_send(schedule) + assert await source.get_schedules() == [schedule] + await source.shutdown() + + +async def test_cluster_post_run_time(valkey_cluster_url: str) -> None: + prefix = uuid.uuid4().hex + source = ValkeyClusterScheduleSource(valkey_cluster_url, prefix=prefix) + schedule = ScheduledTask( + task_name="test_task", + labels={}, + args=[], + kwargs={}, + time=dt.datetime(2000, 1, 1), + ) + await source.add_schedule(schedule) + assert await source.get_schedules() == [schedule] + await source.post_send(schedule) + assert await source.get_schedules() == [] + await source.shutdown() + + +async def test_cluster_get_schedules(valkey_cluster_url: str) -> None: + """ + Test of a valkey cluster source. + + This test checks that if the schedules are located on different nodes, + the source will still be able to get them all. + + To simulate this we set a specific shard key for each schedule. + The shard keys are from this gist: + + https://gist.githubusercontent.com/dvirsky/93f43277317f629bb06e858946416f7e/raw/b0438faf6f5a0020c12a0730f6cd6ac4bdc4b171/crc16_slottable.h + + """ + prefix = uuid.uuid4().hex + source = ValkeyClusterScheduleSource(valkey_cluster_url, prefix=prefix) + schedule1 = ScheduledTask( + schedule_id=r"id-{06S}", + task_name="test_task1", + labels={}, + args=[], + kwargs={}, + cron="* * * * *", + ) + schedule2 = ScheduledTask( + schedule_id=r"id-{4Rs}", + task_name="test_task2", + labels={}, + args=[], + kwargs={}, + cron="* * * * *", + ) + await source.add_schedule(schedule1) + await source.add_schedule(schedule2) + schedules = await source.get_schedules() + assert len(schedules) == 2 + assert schedule1 in schedules + assert schedule2 in schedules + await source.shutdown() + + +async def test_sentinel_set_schedule( + valkey_sentinels: List[Tuple[str, int]], + valkey_sentinel_master_name: str, +) -> None: + prefix = uuid.uuid4().hex + source = ValkeySentinelScheduleSource( + sentinels=valkey_sentinels, + master_name=valkey_sentinel_master_name, + prefix=prefix, + ) + schedule = ScheduledTask( + task_name="test_task", + labels={}, + args=[], + kwargs={}, + cron="* * * * *", + ) + await source.add_schedule(schedule) + schedules = await source.get_schedules() + assert schedules == [schedule] + await source.shutdown() + + +async def test_sentinel_delete_schedule( + valkey_sentinels: List[Tuple[str, int]], + valkey_sentinel_master_name: str, +) -> None: + prefix = uuid.uuid4().hex + source = ValkeySentinelScheduleSource( + sentinels=valkey_sentinels, + master_name=valkey_sentinel_master_name, + prefix=prefix, + ) + schedule = ScheduledTask( + task_name="test_task", + labels={}, + args=[], + kwargs={}, + cron="* * * * *", + ) + await source.add_schedule(schedule) + schedules = await source.get_schedules() + assert schedules == [schedule] + await source.delete_schedule(schedule.schedule_id) + schedules = await source.get_schedules() + # Schedules are empty. 
+ assert not schedules + await source.shutdown() + + +async def test_sentinel_post_run_cron( + valkey_sentinels: List[Tuple[str, int]], + valkey_sentinel_master_name: str, +) -> None: + prefix = uuid.uuid4().hex + source = ValkeySentinelScheduleSource( + sentinels=valkey_sentinels, + master_name=valkey_sentinel_master_name, + prefix=prefix, + ) + schedule = ScheduledTask( + task_name="test_task", + labels={}, + args=[], + kwargs={}, + cron="* * * * *", + ) + await source.add_schedule(schedule) + assert await source.get_schedules() == [schedule] + await source.post_send(schedule) + assert await source.get_schedules() == [schedule] + await source.shutdown() + + +async def test_sentinel_post_run_time( + valkey_sentinels: List[Tuple[str, int]], + valkey_sentinel_master_name: str, +) -> None: + prefix = uuid.uuid4().hex + source = ValkeySentinelScheduleSource( + sentinels=valkey_sentinels, + master_name=valkey_sentinel_master_name, + prefix=prefix, + ) + schedule = ScheduledTask( + task_name="test_task", + labels={}, + args=[], + kwargs={}, + time=dt.datetime(2000, 1, 1), + ) + await source.add_schedule(schedule) + assert await source.get_schedules() == [schedule] + await source.post_send(schedule) + assert await source.get_schedules() == [] + await source.shutdown() + + +async def test_sentinel_buffer( + valkey_sentinels: List[Tuple[str, int]], + valkey_sentinel_master_name: str, +) -> None: + prefix = uuid.uuid4().hex + source = ValkeySentinelScheduleSource( + sentinels=valkey_sentinels, + master_name=valkey_sentinel_master_name, + prefix=prefix, + buffer_size=1, + ) + schedule1 = ScheduledTask( + task_name="test_task1", + labels={}, + args=[], + kwargs={}, + cron="* * * * *", + ) + schedule2 = ScheduledTask( + task_name="test_task2", + labels={}, + args=[], + kwargs={}, + cron="* * * * *", + ) + await source.add_schedule(schedule1) + await source.add_schedule(schedule2) + schedules = await source.get_schedules() + assert len(schedules) == 2 + assert schedule1 in schedules + assert schedule2 in schedules + await source.shutdown()