diff --git a/.editorconfig b/.editorconfig new file mode 100644 index 00000000..b780dfd7 --- /dev/null +++ b/.editorconfig @@ -0,0 +1,28 @@ +root = true + + +[*] +charset = utf-8 +insert_final_newline = true +indent_style = space +indent_size = 2 + +[*.md] +indent_size = 2 +max_line_length = 80 + +[{*.py,*.ipynb}] +indent_size = 4 +max_line_length = 88 + +[*.sh] +# like -i=4 +indent_style = space +indent_size = 4 + +shell_variant = bash # --language-variant +binary_next_line = true +switch_case_indent = true # --case-indent +space_redirects = true +keep_padding = false +function_next_line = true # --func-next-line diff --git a/.editorconfig-checker.json b/.editorconfig-checker.json new file mode 100644 index 00000000..684cbc35 --- /dev/null +++ b/.editorconfig-checker.json @@ -0,0 +1,19 @@ +{ + "Version": "v3.4.0", + "Verbose": false, + "Debug": false, + "IgnoreDefaults": false, + "SpacesAfterTabs": false, + "NoColor": false, + "Exclude": ["^\\.idea/", "\\.md$", "\\.py$", "\\.ipynb$"], + "AllowedContentTypes": [], + "PassedFiles": [], + "Disable": { + "EndOfLine": false, + "Indentation": false, + "InsertFinalNewline": false, + "TrimTrailingWhitespace": false, + "IndentSize": true, + "MaxLineLength": false + } +} diff --git a/.gitattributes b/.gitattributes new file mode 100644 index 00000000..adc0809a --- /dev/null +++ b/.gitattributes @@ -0,0 +1,108 @@ +# Common settings that generally should always be used with your language specific settings + +# Auto detect text files and perform LF normalization +# https://www.davidlaing.com/2012/09/19/customise-your-gitattributes-to-become-a-git-ninja/ +* text=auto + +# +# The above will handle all files NOT found below +# + +# Documents +*.bibtex text diff=bibtex +*.doc diff=astextplain +*.DOC diff=astextplain +*.docx diff=astextplain +*.DOCX diff=astextplain +*.dot diff=astextplain +*.DOT diff=astextplain +*.pdf diff=astextplain +*.PDF diff=astextplain +*.rtf diff=astextplain +*.RTF diff=astextplain +*.md text +*.tex text 
diff=tex +*.adoc text +*.textile text +*.mustache text +*.csv text +*.tab text +*.tsv text +*.txt text +*.sql text + +# Graphics +*.png binary +*.jpg binary +*.jpeg binary +*.gif binary +*.tif binary +*.tiff binary +*.ico binary +# SVG treated as an asset (binary) by default. +*.svg text +# If you want to treat it as binary, +# use the following line instead. +# *.svg binary +*.eps binary + +# Scripts +*.bash text eol=lf +*.fish text eol=lf +*.sh text eol=lf +# These are explicitly windows files and should use crlf +*.bat text eol=crlf +*.cmd text eol=crlf +*.ps1 text eol=crlf + +# Serialisation +*.json text +*.toml text +*.xml text +*.yaml text +*.yml text + +# Archives +*.7z binary +*.gz binary +*.tar binary +*.tgz binary +*.zip binary + +# Text files where line endings should be preserved +*.patch -text + +# +# Exclude files from exporting +# + +.gitattributes export-ignore +.gitignore export-ignore +# Basic .gitattributes for a python repo. + +# Source files +# ============ +*.pxd text diff=python +*.py text diff=python +*.py3 text diff=python +*.pyw text diff=python +*.pyx text diff=python +*.pyz text diff=python + +# Binary files +# ============ +*.db binary +*.p binary +*.pkl binary +*.pickle binary +*.pyc binary +*.pyd binary +*.pyo binary + +# Jupyter notebook +*.ipynb text + +# Note: .db, .p, and .pkl files are associated +# with the python modules ``pickle``, ``dbm.*``, +# ``shelve``, ``marshal``, ``anydbm``, & ``bsddb`` +# (among others). 
diff --git a/.github/workflows/pre-commit.yml b/.github/workflows/pre-commit.yml new file mode 100644 index 00000000..e5fd8908 --- /dev/null +++ b/.github/workflows/pre-commit.yml @@ -0,0 +1,48 @@ +name: pre-commit + +on: + pull_request: + push: + branches: + - main + - develop + - feature-* + +jobs: + pre-commit: + runs-on: ubuntu-latest + permissions: + id-token: write + contents: read + packages: write + pull-requests: write + steps: + - uses: actions/checkout@v4 + - name: Install uv + uses: astral-sh/setup-uv@v6 + with: + version: 0.8.22 + python-version: 3.11 + enable-cache: true + cache-suffix: pre-commit + cache-dependency-glob: uv.lock + - name: Restore venv cache + uses: actions/cache@v4 + with: + path: | + .venv + key: ${{ runner.os }}-uv-${{ hashFiles('uv.lock') }} + - name: Restore mypy cache + uses: actions/cache@v4 + with: + path: .mypy_cache + key: mypy_cache|${{ hashFiles('pyproject.toml') }} + - name: Restore pre-commit cache + uses: actions/cache@v4 + with: + path: ~/.cache/pre-commit + key: pre-commit-4|${{ hashFiles('.pre-commit-config.yaml') }} + - name: Synchronize project dependencies + run: uv sync --group dev + - name: Run pre-commit checks + run: uv run pre-commit run --show-diff-on-failure --all-files --hook-stage manual diff --git a/.github/workflows/test-and-publish.yml b/.github/workflows/test-and-publish.yml new file mode 100644 index 00000000..ead3bbb9 --- /dev/null +++ b/.github/workflows/test-and-publish.yml @@ -0,0 +1,80 @@ +name: Test and Publish + +on: + pull_request: + push: + branches: + - main + - develop + - feature-* + release: + types: + - published + +jobs: + tests: + name: Tests (Python ${{ matrix.python-version }}) + runs-on: ubuntu-latest + strategy: + fail-fast: false + matrix: + python-version: ["3.10", "3.11", "3.12", "3.13", "3.14"] + permissions: + id-token: write + contents: read + packages: write + pull-requests: write + steps: + - uses: actions/checkout@v4 + - name: Install uv + uses: astral-sh/setup-uv@v6 + 
with: + version: 0.8.22 + python-version: ${{ matrix.python-version }} + enable-cache: true + cache-suffix: test-and-publish + cache-dependency-glob: uv.lock + - name: Run tests with nox + run: uvx nox --python ${{ matrix.python-version }} --session tests -- --no-parallel + env: + PDFREST_API_KEY: ${{ secrets.PDFREST_API_KEY }} + + publish: + name: Publish to CodeArtifact + needs: tests + if: github.event_name == 'release' + runs-on: ubuntu-latest + permissions: + id-token: write + contents: read + packages: write + env: + UV_PROJECT_ENVIRONMENT: .venv-release + steps: + - uses: actions/checkout@v4 + - name: Assume AWS role for repository CI + uses: aws-actions/configure-aws-credentials@v4 + with: + role-to-assume: arn:aws:iam::304774597385:role/cit-oidc-role-${{ github.event.repository.name }}-ci + aws-region: us-east-2 + - name: Install uv + uses: astral-sh/setup-uv@v6 + with: + version: 0.8.22 + enable-cache: true + cache-suffix: pre-commit + cache-dependency-glob: uv.lock + - name: Restore venv cache + uses: actions/cache@v4 + with: + path: | + ${{ env.UV_PROJECT_ENVIRONMENT }} + key: ${{ runner.os }}-uv-release-${{ hashFiles('pyproject.toml') }} + - name: Install keyring + run: uv tool install keyring --with keyrings.codeartifact + - name: Synchronize project dependencies + run: uv sync --group dev + - name: Build distribution artifacts + run: uv build --python 3.11 + - name: Publish package to CodeArtifact + run: uv publish --index cit-pypi diff --git a/.gitignore b/.gitignore new file mode 100644 index 00000000..ad36eefe --- /dev/null +++ b/.gitignore @@ -0,0 +1,453 @@ +# Created by https://www.toptal.com/developers/gitignore/api/vim,emacs,linux,macos,python,pycharm,windows,git +# Edit at https://www.toptal.com/developers/gitignore?templates=vim,emacs,linux,macos,python,pycharm,windows,git + +### Emacs ### +# -*- mode: gitignore; -*- +*~ +\#*\# +/.emacs.desktop +/.emacs.desktop.lock +*.elc +auto-save-list +tramp +.\#* + +# Org-mode +.org-id-locations 
+*_archive + +# flymake-mode +*_flymake.* + +# eshell files +/eshell/history +/eshell/lastdir + +# elpa packages +/elpa/ + +# reftex files +*.rel + +# AUCTeX auto folder +/auto/ + +# cask packages +.cask/ +dist/ + +# Flycheck +flycheck_*.el + +# server auth directory +/server/ + +# projectiles files +.projectile + +# directory configuration +.dir-locals.el + +# network security +/network-security.data + + +### Git ### +# Created by git for backups. To disable backups in Git: +# $ git config --global mergetool.keepBackup false +*.orig + +# Created by git when using merge tools for conflicts +*.BACKUP.* +*.BASE.* +*.LOCAL.* +*.REMOTE.* +*_BACKUP_*.txt +*_BASE_*.txt +*_LOCAL_*.txt +*_REMOTE_*.txt + +### Linux ### + +# temporary files which can be created if a process still has a handle open of a deleted file +.fuse_hidden* + +# KDE directory preferences +.directory + +# Linux trash folder which might appear on any partition or disk +.Trash-* + +# .nfs files are created when an open file is removed but is still being accessed +.nfs* + +### macOS ### +# General +.DS_Store +.AppleDouble +.LSOverride + +# Icon must end with two \r +Icon + + +# Thumbnails +._* + +# Files that might appear in the root of a volume +.DocumentRevisions-V100 +.fseventsd +.Spotlight-V100 +.TemporaryItems +.Trashes +.VolumeIcon.icns +.com.apple.timemachine.donotpresent + +# Directories potentially created on remote AFP share +.AppleDB +.AppleDesktop +Network Trash Folder +Temporary Items +.apdisk + +### macOS Patch ### +# iCloud generated files +*.icloud + +### PyCharm ### +# Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio, WebStorm and Rider +# Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839 + +# User-specific stuff +.idea/**/workspace.xml +.idea/**/tasks.xml +.idea/**/usage.statistics.xml +.idea/**/dictionaries +.idea/**/shelf + +# AWS User-specific +.idea/**/aws.xml + +# Generated files +.idea/**/contentModel.xml + +# 
Sensitive or high-churn files +.idea/**/dataSources/ +.idea/**/dataSources.ids +.idea/**/dataSources.local.xml +.idea/**/sqlDataSources.xml +.idea/**/dynamic.xml +.idea/**/uiDesigner.xml +.idea/**/dbnavigator.xml + +# Gradle +.idea/**/gradle.xml +.idea/**/libraries + +# Gradle and Maven with auto-import +# When using Gradle or Maven with auto-import, you should exclude module files, +# since they will be recreated, and may cause churn. Uncomment if using +# auto-import. +# .idea/artifacts +# .idea/compiler.xml +# .idea/jarRepositories.xml +# .idea/modules.xml +# .idea/*.iml +# .idea/modules +# *.iml +# *.ipr + +# CMake +cmake-build-*/ + +# Mongo Explorer plugin +.idea/**/mongoSettings.xml + +# File-based project format +*.iws + +# IntelliJ +out/ + +# mpeltonen/sbt-idea plugin +.idea_modules/ + +# JIRA plugin +atlassian-ide-plugin.xml + +# Cursive Clojure plugin +.idea/replstate.xml + +# SonarLint plugin +.idea/sonarlint/ + +# Crashlytics plugin (for Android Studio and IntelliJ) +com_crashlytics_export_strings.xml +crashlytics.properties +crashlytics-build.properties +fabric.properties + +# Editor-based Rest Client +.idea/httpRequests + +# Android studio 3.1+ serialized cache file +.idea/caches/build_file_checksums.ser + +### PyCharm Patch ### +# Comment Reason: https://github.com/joeblau/gitignore.io/issues/186#issuecomment-215987721 + +# *.iml +# modules.xml +# .idea/misc.xml +# *.ipr + +# Sonarlint plugin +# https://plugins.jetbrains.com/plugin/7973-sonarlint +.idea/**/sonarlint/ + +# SonarQube Plugin +# https://plugins.jetbrains.com/plugin/7238-sonarqube-community-plugin +.idea/**/sonarIssues.xml + +# Markdown Navigator plugin +# https://plugins.jetbrains.com/plugin/7896-markdown-navigator-enhanced +.idea/**/markdown-navigator.xml +.idea/**/markdown-navigator-enh.xml +.idea/**/markdown-navigator/ + +# Cache file creation bug +# See https://youtrack.jetbrains.com/issue/JBR-2257 +.idea/$CACHE_FILE$ + +# CodeStream plugin +# 
https://plugins.jetbrains.com/plugin/12206-codestream +.idea/codestream.xml + +# Azure Toolkit for IntelliJ plugin +# https://plugins.jetbrains.com/plugin/8053-azure-toolkit-for-intellij +.idea/**/azureSettings.xml + +### Python ### +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +build/ +develop-eggs/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. +*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +*.py,cover +.hypothesis/ +.pytest_cache/ +cover/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 +db.sqlite3-journal + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +.pybuilder/ +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# IPython +profile_default/ +ipython_config.py + +# pyenv +# For a library or package, you might want to ignore these files since the code is +# intended to run in multiple environments; otherwise, check them in: +# .python-version + +# pipenv +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. +# However, in case of collaboration, if having platform-specific dependencies or dependencies +# having no cross-platform support, pipenv may install dependencies that don't work, or not +# install all needed dependencies. +#Pipfile.lock + +# poetry +# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. 
+# This is especially recommended for binary packages to ensure reproducibility, and is more +# commonly ignored for libraries. +# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control +#poetry.lock + +# in version control. + +__pypackages__/ + +# Celery stuff +celerybeat-schedule +celerybeat.pid + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ + +# pytype static type analyzer +.pytype/ + +# Cython debug symbols +cython_debug/ + +# PyCharm +# JetBrains specific template is maintained in a separate JetBrains.gitignore that can +# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore +# and can be added to the global gitignore or merged into this file. For a more nuclear +# option (not recommended) you can uncomment the following to ignore the entire idea folder. 
+#.idea/ + +### Python Patch ### +# Poetry local configuration file - https://python-poetry.org/docs/configuration/#local-configuration +poetry.toml + +# ruff +.ruff_cache/ + +# LSP config files +pyrightconfig.json + +### Vim ### +# Swap +[._]*.s[a-v][a-z] +!*.svg # comment out if you don't need vector files +[._]*.sw[a-p] +[._]s[a-rt-v][a-z] +[._]ss[a-gi-z] +[._]sw[a-p] + +# Session +Session.vim +Sessionx.vim + +# Temporary +.netrwhist +# Auto-generated tag files +tags +# Persistent undo +[._]*.un~ + +### Windows ### +# Windows thumbnail cache files +Thumbs.db +Thumbs.db:encryptable +ehthumbs.db +ehthumbs_vista.db + +# Dump file +*.stackdump + +# Folder config file +[Dd]esktop.ini + +# Recycle Bin used on file shares +$RECYCLE.BIN/ + +# Windows Installer files +*.cab +*.msi +*.msix +*.msm +*.msp + +# Windows shortcuts +*.lnk + +# End of https://www.toptal.com/developers/gitignore/api/vim,emacs,linux,macos,python,pycharm,windows,git + +# Files for building Docker containers that don't have to get checked in +/installers/ + +# Database files +*.db +*.sqlite* + +# PDM + +# Profiling +*.prof.txt +*.speedscope.json diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 00000000..fe7b88e0 --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,92 @@ +# For info on multiline regular expressions: https://pre-commit.com/index.html#regular-expressions +# Verbose regular expressions '(?x)': https://docs.python.org/3.9/library/re.html#re.X +exclude: | + (?x)^( + .idea/| + .venv/ + ) +default_install_hook_types: [pre-commit, pre-merge-commit, pre-push] +default_stages: [pre-commit, pre-merge-commit, pre-push, manual] +repos: + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: v6.0.0 + hooks: + - id: trailing-whitespace + args: [--markdown-linebreak-ext=md] + - id: end-of-file-fixer + - id: check-toml + - id: check-yaml + - id: check-added-large-files + args: [--maxkb=4000] + - repo: https://github.com/JoC0de/pre-commit-prettier + 
rev: v3.6.2 + hooks: + - id: prettier + exclude: .md$ + - repo: https://github.com/python-jsonschema/check-jsonschema + rev: 0.34.0 + hooks: + - id: check-github-workflows + - repo: https://github.com/editorconfig-checker/editorconfig-checker.python + rev: 3.4.0 + hooks: + - id: editorconfig-checker + - repo: https://github.com/executablebooks/mdformat + rev: 0.7.22 + hooks: + - id: mdformat + name: mdformat on non-.github files + exclude: ^.github/ + args: ["--wrap", "80", "--number"] + additional_dependencies: + - mdformat-gfm + - mdformat-frontmatter + - mdformat-footnote + - mdformat-toc + - id: mdformat + name: mdformat on .github files + files: ^.github/.*$ + args: ["--wrap", "no", "--number"] + additional_dependencies: + - mdformat-gfm + - mdformat-frontmatter + - mdformat-footnote + - mdformat-toc + - repo: https://github.com/shellcheck-py/shellcheck-py + rev: v0.11.0.1 + hooks: + - id: shellcheck + args: [-x] + - repo: https://github.com/maxwinterstein/shfmt-py + rev: v3.12.0.1 + hooks: + - id: shfmt + - repo: https://github.com/astral-sh/ruff-pre-commit + rev: v0.13.3 + hooks: + - id: ruff-check + types_or: [python, pyi, jupyter] + - id: ruff-format + types_or: [python, pyi, jupyter] + - repo: https://github.com/pappasam/toml-sort + rev: v0.24.3 + hooks: + - id: toml-sort-fix + - repo: https://github.com/pre-commit/mirrors-mypy + rev: "v1.18.2" + hooks: + - id: mypy + stages: [pre-push, manual] + name: mypy + additional_dependencies: + # Need pydantic to load the pydantic.mypy plugin + - pydantic>=2.12.0 + - repo: https://github.com/astral-sh/uv-pre-commit + # uv version. 
+ rev: 0.8.24 + hooks: + - id: uv-lock + - repo: https://github.com/RobertCraigie/pyright-python + rev: v1.1.406 + hooks: + - id: pyright diff --git a/.python-version b/.python-version new file mode 100644 index 00000000..2c073331 --- /dev/null +++ b/.python-version @@ -0,0 +1 @@ +3.11 diff --git a/AGENTS.md b/AGENTS.md new file mode 100644 index 00000000..cd47344b --- /dev/null +++ b/AGENTS.md @@ -0,0 +1,193 @@ +# Repository Guidelines + +## Project Structure & Module Organization + +- Source lives in `src/pdfrest/`; expose public APIs via `__all__` and keep + package metadata in `pyproject.toml`. +- Tests sit in `tests/` mirroring the module layout (e.g., + `tests/test_client.py`). +- Workflow definitions are in `.github/workflows/`; adjust only when CI + requirements change. +- Documentation and contributor notes reside at the repo root (`README.md`, + `AGENTS.md`). Automation sessions live in `noxfile.py`; keep shared task logic + there. + +## Build, Test, and Development Commands + +- `uv sync --group dev` — create/update the virtual environment with lint, + type-check, and test tooling. +- `uv run pre-commit run --all-files` — enforce formatting and lint rules before + pushing. +- `uv run pytest` — execute the suite with the active interpreter. +- `uv build` — produce wheels and sdists identical to the release workflow. +- `uvx nox -s tests` — create matrix virtualenvs via nox and execute the pytest + session. +- `nox` executes pytest sessions with built-in parallelism; when invoking pytest + directly use `pytest -n 8 --maxschedchunk 2` to mirror the parallel test + scheduling and keep runtimes predictable. + +## Coding Style & Naming Conventions + +- Target Python 3.10–3.14; use 4-space indentation and type hints for public + APIs. +- Black + isort (via ruff) enforce formatting; run through pre-commit prior to + review. +- Use `snake_case` for functions/modules, `PascalCase` for classes, and + `UPPER_SNAKE_CASE` for constants. 
+- Prefer `pathlib`, f-strings, and other modern stdlib features—pyupgrade rules + will flag legacy code. +- When calling pdfRest, supply the API key via the `Api-Key` header (not + `Authorization: Bearer`); keep tests and client defaults in sync with this + convention. +- Treat `PdfRestClient` and `AsyncPdfRestClient` as context managers in both + production code and tests so transports are disposed deterministically. +- When uploading content, always send the multipart field name `file`; when + uploading by URL, send a JSON payload using the `url` key with a list of + http/https addresses (single values are promoted to lists internally). +- `prepare_request` rejects mixed multipart (`files`) and JSON payloads; only + URL uploads (`create_from_urls`) should combine JSON bodies with the request. +- Replicate server-side safeguards when porting validation logic: the output + prefix must stay basename-only, reject reserved names (`profile.json`, + `metadata.json`), forbid leading dots or special characters, and report the + offending characters in error messages. Page-range validation operates on each + list item individually—accepts positive integers, `last`, or ranges like + `1-3`/`6-last`—and must raise errors that match the front-end wording. +- Combine multiple synchronous context managers in a single `with` statement + (ruff enforces `SIM117`). When an async context manager participates (e.g., + `async with AsyncPdfRestClient(...)`), nest any synchronous companions such as + `pytest.raises` inside the async block—Python forbids mixing `async with` and + regular `with` clauses in the same statement. When working with `HttpUrl` + objects, cast to `str` before string operations such as suffix checks. 
*When + using `pytest.raises`, prefer combining it into the same `with` clause as + another synchronous context manager when semantics allow.* +- For image conversions, adapt request data with `BasePdfRestGraphicPayload` + generics; name concrete payloads `BmpPdfRestPayload`, `GifPdfRestPayload`, + `JpegPdfRestPayload`, `PngPdfRestPayload`, and `TiffPdfRestPayload`. Client + helpers should accept a `payload_model` argument and use fully spelled-out + method names such as `convert_to_jpeg`/`convert_to_tiff` (avoid historic + three-letter suffixes). +- Define reusable literals and simple aliases under `src/pdfrest/types/` and + import them from `pdfrest.types` (e.g., `PdfInfoQuery`) instead of reaching + into underscored modules. Treat that package as the public surface for shared + type contracts consumed by both clients and tests. +- Payload models that reference uploaded resources should accept + `list[PdfRestFile]` with explicit length bounds and serialize IDs for the + allowed cardinality (`serialization_alias="id"` plus a serializer that emits + either the first id when `max_length == 1` or a list when larger). Client + helpers should pass sequences through without converting to raw IDs manually. +- When a payload accepts uploaded content, validate MIME types via + `_allowed_mime_types` to surface clear errors before making the request. +- When an endpoint expects JSON-encoded structures (e.g., arrays of redaction + rules), expose typed arguments (TypedDicts, Literals, etc.) via + `pdfrest.types` and let the payload serializer produce the JSON string for the + request body. +- Client helpers that consume existing resources must accept `PdfRestFile` + instances (optionally sequences) rather than raw IDs or strings; use the + `files` client helpers to resolve file IDs before invoking conversion or + metadata routes. 
+- For document splitting and merging, expose rich Python types on the client + surface (`PdfPageSelection`, `PdfMergeInput`) and validate them through the + `PdfSplitPayload`/`PdfMergePayload` models. Normalize per-output page groups + with the shared page-range validator, default merge items without explicit + ranges to `"1-last"`, and serialize merge requests into the parallel `id`, + `pages`, and `type` arrays that pdfRest expects (always emitting `"id"` for + `type[]`). Split/merge payloads accept descending ranges (e.g., `"9-2"`) and + the `"even"`/`"odd"` selectors; graphic conversions remain limited to positive + numbers, `"last"`, and ascending ranges to match the live API behaviour. +- Favor declarative Pydantic validation over bespoke “normalize” helpers: define + nested models, unions, and annotated tuples that parse complex strings into + typed structures (as with the split/merge page-range tuples) and let small + validators enforce the constraints (`BeforeValidator` for parsing, + `AfterValidator` for relational checks). Reserve standalone normalization + functions for behaviour that cannot live on the schema—simpler models produce + clearer errors and are easier for new contributors to understand. +- When adding new services, provide per-endpoint test modules mirroring PNG’s + coverage: parameterized successes for every allowed literal value, request + customization (sync + async), validation failures, and multi-file guards. Add + a shared validation suite when multiple endpoints rely on the same input rules + (e.g., `tests/test_graphic_payload_validation.py`). +- Do not import from private modules (names beginning with an underscore) in + tests or production code—expose any shared helpers via a public module first. + +## Testing Guidelines + +- Write pytest tests: files named `test_*.py`, test functions `test_*`, fixtures + in `conftest.py` where shared. 
+- Ensure high-value coverage of public functions and edge cases; document intent + in test docstrings when non-obvious. +- Use `uvx nox -s tests` to exercise the full interpreter matrix locally when + validating compatibility. +- When writing live tests for URL uploads, first create the remote resources via + `create_from_paths`, then reuse the returned URLs in `create_from_urls` to + avoid relying on third-party availability. +- For parameterized tests prefer `pytest.param(..., id="short-label")` so test + IDs stay readable; make assertions for every relevant response attribute (name + prefix, MIME type, size, URLs, warnings). +- Avoid manual loops over test parameters; prefer `@pytest.mark.parametrize` + with explicit `id=` values so each combination is visible and reproducible. +- Always couple `pytest.raises` with an explicit `match=` regex that reflects + the intended validation error wording—mirror the human-readable text rather + than relying on default exception formatting. +- Mirror PNG’s request/response scenarios for each graphic conversion endpoint: + maintain per-endpoint test modules (`test_convert_to_png.py`, + `test_convert_to_bmp.py`, etc.) covering success, parameter customization, + validation errors, multi-file guards, and async flows. Keep shared payload + validation (output prefix and page-range cases) in a dedicated suite (e.g., + `tests/test_graphic_payload_validation.py`) that exercises every payload + model. +- When introducing additional pdfRest endpoints, follow the same pattern used + for graphic conversions: encapsulate shared request validation in a typed + payload model, expose fully named client methods, and create a dedicated test + module per endpoint that verifies success paths, request customization, + validation errors, and async behavior. 
Centralize any reusable validation + checks (e.g., common field requirements, payload serialization) in shared + helper tests so new services inherit consistent coverage with minimal + duplication. +- Prefer `pytest.mark.parametrize` (with `pytest.param(..., id="...")`) over + explicit loops inside tests; nest parametrization for multi-dimensional + coverage so each case appears as an individual test item. +- Live tests should verify that literal enumerations match pdfRest’s accepted + values. Exercise format-specific options (e.g., each image format’s + `color_model`) individually, and run smoothing enumerations through every + enabled endpoint to confirm consistent server behaviour. Include “wildly” + invalid values (e.g., bogus literals or mixed lists) alongside boundary + failures so the server-side error messaging is exercised. +- Provide live integration tests under `tests/live/` (with an `__init__.py` so + pytest discovers the package) that introspect payload models to enumerate + valid/invalid literal values and numeric boundaries. These tests should vary a + single parameter per request, assert success for legal inputs, and confirm + pdfRest raises errors for out-of-range or unsupported values. When bypassing + local validation to reach the server (e.g., for negative tests), inject the + override via `extra_body` and expect `PdfRestApiError` (or the precise + exception surfaced by the client). When test fixtures produce deterministic + results (e.g., `tests/resources/report.pdf`), assert the concrete values + returned by pdfRest rather than only checking for presence or type. +- Use `tests/resources/20-pages.pdf` for high-page-count scenarios such as split + and merge endpoints so boundary coverage (multi-output splits, staggered page + selections) remains reproducible. 
Parameterize live split/merge tests to cover + multiple page-group patterns, and pair each success case with an invalid input + that reaches the server by overriding the JSON body via `extra_body`. +- Developers can load a pdfRest API key from `.env` during ad-hoc exploration. + The repo includes `python-dotenv`; call `load_dotenv()` (optionally pointing + to `.env`) in temporary scripts to drive the in-flight client against live + endpoints and capture responses for test data and assertions. + +## Commit & Pull Request Guidelines + +- Follow the `area: summary` convention seen in `pdfassistant-chatbot` (e.g., + `client: Add document merge service`). +- Keep commit messages imperative and focused; squash fixups before opening a + PR. +- Reference related issues or tickets in the PR description, and highlight + breaking changes. +- Confirm CI passes (`pre-commit`, Python matrix) and note any manual + verification or screenshots for behaviour updates. + +## CI & Publishing Notes + +- GitHub Actions run two workflows: `pre-commit` (no AWS credentials) and + `Test and Publish` (Python 3.10–3.14 matrix). +- Only the release job assumes the AWS OIDC role to `uv build` and publish with + `uv publish`. +- Keep CodeArtifact credentials out of source control; day-to-day development + should rely solely on public dependencies. diff --git a/README.md b/README.md index e3213b7d..2dad8fa5 100644 --- a/README.md +++ b/README.md @@ -1,2 +1,31 @@ -# pdfrest-python -Python API library for pdfRest +# pdfrest + +Python client library for the PDFRest service. The project is managed with +[uv](https://docs.astral.sh/uv/) and targets Python 3.10 and newer. 
+ +## Getting started + +```bash +uv sync +uv run python -c "import pdfrest; print(pdfrest.__version__)" +``` + +## Development + +To install the tooling used by CI locally, include the `--group dev` flag: + +```bash +uv sync --group dev +``` + +It is recommended to enable the pre-commit hooks after installation: + +```bash +uv run pre-commit install +``` + +Run the test suite with: + +```bash +uv run pytest +``` diff --git a/noxfile.py b/noxfile.py new file mode 100644 index 00000000..dd7ed111 --- /dev/null +++ b/noxfile.py @@ -0,0 +1,44 @@ +import argparse + +import nox + +nox.options.default_venv_backend = "uv" + +python_versions = ["3.10", "3.11", "3.12", "3.13", "3.14"] + + +@nox.session(name="tests", python=python_versions, reuse_venv=True) +def tests(session: nox.Session) -> None: + # Define only custom flags + parser = argparse.ArgumentParser(add_help=False) + parser.add_argument("--no-parallel", action="store_true") + parser.add_argument( + "-n", "--workers", "--numprocesses" + ) # e.g., -n 4 to set workers + custom, remaining = parser.parse_known_args(session.posargs) + + pytest_args = list(remaining) + + # Default to parallel unless disabled or overridden + if custom.no_parallel: + pass + elif custom.workers: + pytest_args[:0] = ["-n", custom.workers, "--maxschedchunk", "2"] + else: + pytest_args[:0] = ["-n", "8", "--maxschedchunk", "2"] + + session.run_install( + "uv", + "sync", + "--no-default-groups", + "--group=dev", + "--reinstall-package=pdfrest", + f"--python={session.virtualenv.location}", + env={"UV_PROJECT_ENVIRONMENT": session.virtualenv.location}, + ) + session.run( + "pytest", + "--cov=pdfrest", + "--cov-report=term-missing", + *pytest_args, + ) diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 00000000..36d26e53 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,193 @@ +[project] +name = "pdfrest" +version = "0.1.0" +description = "Python client library for interacting with the PDFRest API" +readme = "README.md" +authors = 
[ + {name = "Datalogics"}, +] +requires-python = ">=3.10" +dependencies = [ + "httpx>=0.28.1", + "pydantic>=2.12.0", +] + +[build-system] +requires = ["uv_build>=0.8.22,<0.10.0"] +build-backend = "uv_build" + +[dependency-groups] +dev = [ + "pre-commit>=3.7.0", + "ruff>=0.6.9", + "pytest>=8.3.3", + "pytest-cov>=5.0.0", + "mypy>=1.18.2", + "pip-audit>=2.7.3", + "pyright>=1.1.406", + "pytest-md>=0.2.0", + "pytest-emoji>=0.2.0", + "pytest-dotenv>=0.5.2", + "pytest-asyncio>=1.2.0", + "pytest-rerunfailures>=16.0.1", + "pytest-xdist>=3.8.0", + "nox>=2025.5.1", +] + +[tool.mypy] +python_version = "3.10" +pretty = true +plugins = [ + "pydantic.mypy", +] +# Discover modules that were installed in the virtualenv +python_executable = ".venv/bin/python" +fixed_format_cache = true +ignore_missing_imports = false +follow_imports = "silent" +# Balanced strictness (not “strict = true”); catches many bugs without being overbearing +no_implicit_optional = true +check_untyped_defs = true +warn_unused_ignores = true +warn_redundant_casts = true +warn_return_any = true +warn_no_return = true +strict_equality = true +# Defaults here stay a bit lenient; ratchet up over time if desired +disallow_untyped_defs = false +disallow_incomplete_defs = false +disallow_untyped_calls = false +disallow_any_generics = false +implicit_reexport = true +namespace_packages = true +show_error_codes = true +# Typical project structure +packages = ["pdfrest"] # if using setuptools; or rely on src/ layout +exclude = '(build|dist|\.venv|scripts|examples|docs)' + +# Example: tighten src/, loosen tests/ +[[tool.mypy.overrides]] +module = ["pdfrest.*"] +disallow_untyped_defs = true +disallow_incomplete_defs = true + +[[tool.mypy.overrides]] +module = ["tests.*"] +disallow_untyped_defs = false +allow_redefinition = true + +[tool.pyright] +venvPath = "." 
+venv = ".venv" +typeCheckingMode = "standard" +pythonVersion = "3.10" +reportMissingTypeStubs = true +# mypy catches this, so catch it in pyright too +reportOptionalMemberAccess = true +# Sensible signal without being punitive; tune severities as needed +reportUnusedImport = "error" +reportUnusedVariable = "error" +reportUnknownMemberType = "warning" +reportUnknownArgumentType = "warning" +reportUnknownVariableType = "warning" +reportPrivateUsage = "error" +# Keep false positives low in typical library code +useLibraryCodeForTypes = true +# Project layout +include = ["src", "tests"] +exclude = [ + "**/.venv", + "build", + "dist", + "scripts", + "examples", + "docs", +] + +[[tool.pyright.executionEnvironments]] +root = "src" + +[[tool.pyright.executionEnvironments]] +root = "tests" +typeCheckingMode = "basic" +reportUnknownMemberType = "none" +reportUnknownArgumentType = "none" +reportUnknownVariableType = "none" +reportPrivateUsage = "none" + +[tool.pytest.ini_options] +minversion = "7.4" +testpaths = ["tests"] +addopts = "-ra" + +[tool.ruff] +extend-include = ["*.ipynb"] +target-version = "py310" + +[tool.ruff.lint] +# Enable the pycodestyle (`E`) and Pyflakes (`F`) rules by default. +# Rules cribbed from the PDM sources themselves. +extend-select = [ + # If you're doing a project of any substantial size, or anything that runs on a server, + # use logging instead of printing. To check this, uncomment the next line. 
+ # "T20", # print https://docs.astral.sh/ruff/rules/#flake8-print-t20 don't print, use logging + "I", # isort https://beta.ruff.rs/docs/rules/#isort-i + "B", # flake8-bugbear https://beta.ruff.rs/docs/rules/#flake8-bugbear-b + "C4", # flake8-comprehensions https://beta.ruff.rs/docs/rules/#flake8-comprehensions-c4 + "PGH", # pygrep-hooks https://beta.ruff.rs/docs/rules/#pygrep-hooks-pgh + "RUF", # ruff https://beta.ruff.rs/docs/rules/#ruff-specific-rules-ruf + "W", # pycodestyle https://beta.ruff.rs/docs/rules/#warning-w + "YTT", # flake8-2020 https://beta.ruff.rs/docs/rules/#flake8-2020-ytt + "UP", # pyupgrade https://beta.ruff.rs/docs/rules/#pyupgrade-up + "N", # naming https://beta.ruff.rs/docs/rules/#pep8-naming-n + "PT", # pytest https://beta.ruff.rs/docs/rules/#flake8-pytest-style-pt + # https://dev.to/aws-builders/deploy-to-aws-with-github-actions-and-aws-cdk-4m1e suggests + # complexity checks with radon/xenon, but McCabe complexity is available in ruff and + # the cost/benefit of using radon/xenon can be discussed later. 
+ "C90", # mccabe https://beta.ruff.rs/docs/rules/#mccabe-c90 + "FURB", # refurb https://beta.ruff.rs/docs/rules/#refurb-furb + "S", # bandit https://docs.astral.sh/ruff/rules/#flake8-bandit-s ...secure code + "BLE", # blind-except https://docs.astral.sh/ruff/rules/#flake8-blind-except-ble + "EM", # errmsg https://docs.astral.sh/ruff/rules/#flake8-errmsg-em + "G", # logging-format https://docs.astral.sh/ruff/rules/#flake8-logging-format-g + "RET", # return https://docs.astral.sh/ruff/rules/#flake8-return-ret + "DTZ", # datetimez https://docs.astral.sh/ruff/rules/#flake8-datetimez-dtz + "SIM", # simplify https://docs.astral.sh/ruff/rules/#flake8-simplify-sim + "ARG", # unused arguments https://docs.astral.sh/ruff/rules/#flake8-unused-arguments-arg + "PTH", # use pathlib https://docs.astral.sh/ruff/rules/#flake8-use-pathlib-pth + "RSE", # raise https://docs.astral.sh/ruff/rules/#flake8-raise-rse + "TRY", # tryceratops https://docs.astral.sh/ruff/rules/#tryceratops-try exception antipatterns + "COM818", # prohibit trailing bare commas making tuples, see https://docs.astral.sh/ruff/rules/trailing-comma-on-bare-tuple/ +] +# Allow autofix for all enabled rules (when `--fix`) is provided. +fixable = ["ALL"] +ignore = [] +unfixable = [] + +[tool.ruff.lint.isort] +combine-as-imports = true +known-first-party = ["pdfrest"] + +[tool.ruff.lint.per-file-ignores] +"tests/**/*.py" = [ + # From https://github.com/astral-sh/ruff/issues/4368#issue-1705468153 + # at least this three should be fine in tests: + "S101", # asserts allowed in tests... + "ARG", # Unused function args -> fixtures nevertheless are functionally relevant... + "FBT", # Don't care about booleans as positional arguments in tests, e.g. 
via @pytest.mark.parametrize() +] + +[tool.tomlsort] +sort_first = ["project", "build-system"] +spaces_before_inline_comment = 2 +spaces_indent_inline_array = 4 +trailing_comma_inline_array = true + +[tool.uv] +keyring-provider = "subprocess" + +[[tool.uv.index]] +name = "cit-pypi" +url = "https://aws@datalogics-304774597385.d.codeartifact.us-east-2.amazonaws.com/pypi/cit-pypi/simple/" +publish-url = "https://aws@datalogics-304774597385.d.codeartifact.us-east-2.amazonaws.com/pypi/cit-pypi/" +username = "__token__" diff --git a/src/pdfrest/__init__.py b/src/pdfrest/__init__.py new file mode 100644 index 00000000..f18112ed --- /dev/null +++ b/src/pdfrest/__init__.py @@ -0,0 +1,36 @@ +"""Top-level package for the pdfrest client library.""" + +from importlib import metadata + +from .client import AsyncPdfRestClient, PdfRestClient +from .exceptions import ( + PdfRestApiError, + PdfRestAuthenticationError, + PdfRestConfigurationError, + PdfRestError, + PdfRestRequestError, + PdfRestTimeoutError, + PdfRestTransportError, + translate_httpx_error, +) +from .models import UpResponse + +__all__ = ( + "AsyncPdfRestClient", + "PdfRestApiError", + "PdfRestAuthenticationError", + "PdfRestClient", + "PdfRestConfigurationError", + "PdfRestError", + "PdfRestRequestError", + "PdfRestTimeoutError", + "PdfRestTransportError", + "UpResponse", + "__version__", + "translate_httpx_error", +) + +try: # pragma: no cover - fallback should never run in production builds + __version__ = metadata.version("pdfrest") +except metadata.PackageNotFoundError: # pragma: no cover + __version__ = "0.0.0" diff --git a/src/pdfrest/client.py b/src/pdfrest/client.py new file mode 100644 index 00000000..8b8ea380 --- /dev/null +++ b/src/pdfrest/client.py @@ -0,0 +1,2281 @@ +"""Sync and async client interfaces for the pdfrest API.""" + +from __future__ import annotations + +import asyncio +import importlib.metadata +import json +import os +import uuid +from collections.abc import AsyncIterator, Iterator, 
Mapping, Sequence +from contextlib import ExitStack +from os import PathLike +from pathlib import Path +from typing import IO, Any, Generic, Literal, TypeAlias, TypeVar, cast + +import httpx +from httpx import URL +from pydantic import BaseModel, ConfigDict, Field, ValidationError, field_validator + +from .exceptions import ( + PdfRestApiError, + PdfRestAuthenticationError, + PdfRestConfigurationError, + translate_httpx_error, +) +from .models import ( + PdfRestErrorResponse, + PdfRestFile, + PdfRestFileBasedResponse, + PdfRestFileID, + PdfRestInfoResponse, + UpResponse, +) + +__all__ = ("AsyncPdfRestClient", "PdfRestClient") + +from .models._internal import ( + BasePdfRestGraphicPayload, + BmpPdfRestPayload, + GifPdfRestPayload, + JpegPdfRestPayload, + PdfInfoPayload, + PdfMergePayload, + PdfRedactionApplyPayload, + PdfRedactionPreviewPayload, + PdfRestRawFileResponse, + PdfSplitPayload, + PngPdfRestPayload, + TiffPdfRestPayload, + UploadURLs, +) +from .types import ( + ALL_PDF_INFO_QUERIES, + PdfInfoQuery, + PdfMergeInput, + PdfPageSelection, + PdfRedactionInstruction, + PdfRGBColor, +) + +DEFAULT_BASE_URL = "https://api.pdfrest.com" +API_KEY_ENV_VAR = "PDFREST_API_KEY" +API_KEY_HEADER_NAME = "Api-Key" +DEFAULT_GENERAL_TIMEOUT_SECONDS = 10.0 +DEFAULT_READ_TIMEOUT_SECONDS = 120.0 +FILE_UPLOAD_FIELD_NAME = "file" +DEFAULT_FILE_INFO_CONCURRENCY = 8 + +HttpMethod = Literal["GET", "POST", "PUT", "PATCH", "DELETE", "OPTIONS", "HEAD"] +QueryParamValue = str | int | float | bool | None +TimeoutTypes = float | httpx.Timeout | None +AnyMapping = Mapping[str, Any] +Query = Mapping[str, QueryParamValue] +Body = Mapping[str, Any] + +FileContent = IO[bytes] | bytes | str +FileTuple2 = tuple[str | None, FileContent] +FileTuple3 = tuple[str | None, FileContent, str | None] +FileTuple4 = tuple[str | None, FileContent, str | None, Mapping[str, str]] +FileTypes = FileContent | FileTuple2 | FileTuple3 | FileTuple4 +UploadFiles = Sequence[FileTypes] | FileTypes + +FilePath = str | 
PathLike[str] +FilePathTuple2 = tuple[FilePath, str | None] +FilePathTuple3 = tuple[FilePath, str | None, Mapping[str, str]] +FilePathTypes = FilePath | FilePathTuple2 | FilePathTuple3 +FilePathInput = FilePathTypes | Sequence[FilePathTypes] +UrlValue = str | URL +UrlInput = UrlValue | Sequence[UrlValue] +NormalizedFileTypes: TypeAlias = FileContent | FileTuple2 | FileTuple3 | FileTuple4 +DestinationPath = str | PathLike[str] + + +def _default_timeout() -> httpx.Timeout: + return httpx.Timeout( + timeout=DEFAULT_GENERAL_TIMEOUT_SECONDS, + read=DEFAULT_READ_TIMEOUT_SECONDS, + ) + + +def _extract_uploaded_file_ids(payload: Any) -> list[str]: + try: + files_payload = payload["files"] + except (TypeError, KeyError) as exc: # pragma: no cover - defensive + raise PdfRestApiError( + 500, message="Upload response missing 'files' collection." + ) from exc + if not isinstance(files_payload, Sequence): # pragma: no cover - defensive + raise PdfRestApiError(500, message="Upload response 'files' is not a sequence.") + entries = cast(Sequence[Mapping[str, Any]], files_payload) + file_ids: list[str] = [] + for entry in entries: + if "id" not in entry: + raise PdfRestApiError( + 500, message="Upload response contains invalid file references." + ) + file_ids.append(str(entry["id"])) + return file_ids + + +def _normalize_headers(headers: Mapping[str, str]) -> Mapping[str, str]: + return {str(key): str(value) for key, value in headers.items()} + + +def _ensure_file_content(value: FileContent) -> FileContent: + if isinstance(value, (bytes, str)): + return value + if hasattr(value, "read"): + return value + msg = "File content must be a readable binary stream, bytes, or str." + raise TypeError(msg) + + +def _normalize_file_type(file_value: FileTypes) -> NormalizedFileTypes: + if isinstance(file_value, tuple): + length = len(file_value) + if length not in {2, 3, 4}: + msg = "File tuple inputs must contain 2, 3, or 4 items." 
+ raise TypeError(msg) + if length == 2: + filename, content = cast(FileTuple2, file_value) + normalized_filename = str(filename) if filename is not None else None + normalized_content = _ensure_file_content(content) + return (normalized_filename, normalized_content) + if length == 3: + filename, content, content_type = cast(FileTuple3, file_value) + normalized_filename = str(filename) if filename is not None else None + normalized_content = _ensure_file_content(content) + normalized_content_type = ( + str(content_type) if content_type is not None else None + ) + return (normalized_filename, normalized_content, normalized_content_type) + + filename, content, content_type, headers = cast(FileTuple4, file_value) + normalized_filename = str(filename) if filename is not None else None + normalized_content = _ensure_file_content(content) + normalized_content_type = ( + str(content_type) if content_type is not None else None + ) + if not isinstance(headers, Mapping): + msg = "Headers must be provided as a mapping of str keys to str values." + raise TypeError(msg) + normalized_headers = _normalize_headers(headers) + return ( + normalized_filename, + normalized_content, + normalized_content_type, + normalized_headers, + ) + return _ensure_file_content(file_value) + + +def _normalize_upload_files( + files: UploadFiles, +) -> list[tuple[str, NormalizedFileTypes]]: + if isinstance(files, Mapping): + msg = "Upload files must be provided as a sequence or a single file specification." + raise TypeError(msg) + + if isinstance(files, Sequence) and not isinstance(files, (str, bytes, bytearray)): + items = list(files) + else: + # Treat single file specification as a one-element sequence. + items = [cast(FileTypes, files)] + + if not items: + msg = "At least one file must be provided." 
+ raise ValueError(msg) + normalized_items: list[tuple[str, NormalizedFileTypes]] = [] + for file_value in items: + normalized_items.append( + (FILE_UPLOAD_FIELD_NAME, _normalize_file_type(cast(FileTypes, file_value))) + ) + return normalized_items + + +def _parse_path_spec(spec: FilePathTypes) -> tuple[Path, str | None, Mapping[str, str]]: + if isinstance(spec, tuple): + length = len(spec) + if length == 2: + raw_path, content_type = cast(FilePathTuple2, spec) + headers: Mapping[str, str] = {} + elif length == 3: + raw_path, content_type, headers = cast(FilePathTuple3, spec) + if not isinstance(headers, Mapping): + msg = "Headers must be provided as a mapping of str keys to str values." + raise TypeError(msg) + else: + msg = "File path tuples must contain a path plus optional content type and headers." + raise TypeError(msg) + normalized_headers = _normalize_headers(headers) + normalized_content_type = ( + str(content_type) if content_type is not None else None + ) + path = Path(raw_path) + return path, normalized_content_type, normalized_headers + path = Path(spec) + return path, None, {} + + +def _normalize_path_inputs( + file_paths: FilePathInput, +) -> list[FilePathTypes]: + if isinstance(file_paths, Sequence) and not isinstance( + file_paths, (str, bytes, bytearray) + ): + sequence_paths = cast(Sequence[FilePathTypes], file_paths) + items: list[FilePathTypes] = list(sequence_paths) + else: + items = [cast(FilePathTypes, file_paths)] + if not items: + msg = "At least one file path must be provided." 
+ raise ValueError(msg) + return items + + +def _resolve_file_id(file_ref: PdfRestFile | str) -> str: + return file_ref.id if isinstance(file_ref, PdfRestFile) else str(file_ref) + + +def _normalize_file_id(file_ref: PdfRestFileID | str) -> PdfRestFileID: + if isinstance(file_ref, PdfRestFileID): + return file_ref + return PdfRestFileID(str(file_ref)) + + +ClientType = TypeVar("ClientType", httpx.Client, httpx.AsyncClient) + + +class _ClientConfig(BaseModel): + """Internal representation of client configuration validated by Pydantic.""" + + base_url: URL + api_key: str | None = None + timeout: TimeoutTypes = Field(default_factory=_default_timeout) + headers: dict[str, str] = Field(default_factory=dict) + + model_config = ConfigDict(arbitrary_types_allowed=True) + + @field_validator("base_url", mode="before") + @classmethod + def _parse_base_url(cls, value: Any) -> URL: + url_value = value or DEFAULT_BASE_URL + url = URL(str(url_value)) + if url.scheme not in {"http", "https"}: + msg = "base_url must use http or https scheme." 
+ raise PdfRestConfigurationError(msg) + return ( + url + if not url.path or url.path == "/" + else url.copy_with(path=url.path.rstrip("/")) + ) + + @field_validator("api_key") + @classmethod + def _validate_api_key(cls, value: str | None) -> str | None: + if value is None: + return None + trimmed = value.strip() + if not trimmed: + return None + return trimmed + + @field_validator("headers", mode="before") + @classmethod + def _validate_headers(cls, value: Any) -> dict[str, str]: + if value is None: + return {} + converted: dict[str, str] = {} + for key, item in dict(value).items(): + converted[str(key)] = str(item) + return converted + + @field_validator("timeout", mode="before") + @classmethod + def _validate_timeout(cls, value: Any) -> TimeoutTypes: + if value is None: + return _default_timeout() + if isinstance(value, (int, float)): + return float(value) + if isinstance(value, httpx.Timeout): + return value + msg = "timeout must be a float (seconds) or httpx.Timeout instance." + raise PdfRestConfigurationError(msg) + + +class _RequestModel(BaseModel): + """Internal request data validated prior to dispatch.""" + + method: HttpMethod + endpoint: str + params: dict[str, QueryParamValue] | None = None + headers: dict[str, str] = Field(default_factory=dict) + timeout: TimeoutTypes + json_body: dict[str, Any] | None = None + files: Any | None = None + data: Any | None = None + + model_config = ConfigDict(arbitrary_types_allowed=True) + + @field_validator("endpoint") + @classmethod + def _validate_endpoint(cls, value: str) -> str: + if not value.startswith("/"): + msg = "endpoint must start with '/'." 
+ raise PdfRestConfigurationError(msg) + return value + + +class _BaseApiClient(Generic[ClientType]): + """Shared logic between sync and async client variants.""" + + _config: _ClientConfig + _client: ClientType + _owns_http_client: bool + + def __init__( + self, + *, + api_key: str | None = None, + base_url: str | URL | None = None, + timeout: TimeoutTypes | None = None, + headers: AnyMapping | None = None, + ) -> None: + raw_api_key = api_key if api_key is not None else os.getenv(API_KEY_ENV_VAR) + resolved_api_key = ( + raw_api_key.strip() if raw_api_key and raw_api_key.strip() else None + ) + + resolved_base_url = ( + URL(str(base_url)) if base_url is not None else URL(DEFAULT_BASE_URL) + ) + + if resolved_api_key is None and self._base_url_requires_api_key( + resolved_base_url + ): + msg = ( + "API key is required when communicating with pdfRest-hosted " + "endpoints. Provide `api_key` or set the PDFREST_API_KEY environment variable." + ) + raise PdfRestConfigurationError(msg) + + if resolved_api_key is not None: + self._validate_pdfrest_api_key(resolved_api_key, resolved_base_url) + + version = importlib.metadata.version("pdfrest") + default_headers: dict[str, str] = { + "Accept": "application/json", + "wsn": "pdfrest-python", + "User-Agent": f"pdfrest-python-sdk/{version}", + } + if resolved_api_key is not None: + default_headers[API_KEY_HEADER_NAME] = resolved_api_key + if headers: + for key, value in headers.items(): + default_headers[str(key)] = str(value) + + try: + self._config = _ClientConfig( + base_url=resolved_base_url, + api_key=resolved_api_key, + timeout=timeout if timeout is not None else _default_timeout(), + headers=default_headers, + ) + except PdfRestConfigurationError: + raise + except ValidationError as exc: # pragma: no cover - defensive + raise PdfRestConfigurationError(str(exc)) from exc + + @staticmethod + def _base_url_requires_api_key(url: URL) -> bool: + host = url.host or "" + return host.lower().endswith("pdfrest.com") + + 
@staticmethod + def _validate_pdfrest_api_key(api_key: str, url: URL) -> None: + if not _BaseApiClient._base_url_requires_api_key(url): + return + if len(api_key) != 36: + msg = "pdfRest API keys must be 36 characters (UUID format)." + raise PdfRestConfigurationError(msg) + try: + uuid.UUID(api_key) + except ValueError: + msg = "pdfRest API keys must be valid UUID strings." + raise PdfRestConfigurationError(msg) from None + + @property + def base_url(self) -> URL: + """Resolved base URL for the client.""" + + return self._config.base_url + + def _prepare_request( + self, + method: HttpMethod, + endpoint: str, + *, + query: Query | None = None, + json_body: Body | None = None, + extra_query: Query | None = None, + extra_headers: AnyMapping | None = None, + extra_body: Body | None = None, + timeout: TimeoutTypes | None = None, + files: Any | None = None, + data: Any | None = None, + ) -> _RequestModel: + headers = self._compose_headers(extra_headers) + params = self._compose_query_params(query, extra_query) + json_payload = self._compose_json_body(json_body, extra_body) + if files is not None and json_payload is not None: + msg = "JSON payloads cannot be combined with multipart file uploads." 
+ raise PdfRestConfigurationError(msg) + timeout_value = timeout if timeout is not None else self._config.timeout + + try: + request = _RequestModel( + method=method, + endpoint=endpoint, + params=params, + headers=headers, + timeout=timeout_value, + json_body=json_payload, + files=files, + data=data, + ) + except PdfRestConfigurationError: + raise + except ValidationError as exc: # pragma: no cover - defensive + raise PdfRestConfigurationError(str(exc)) from exc + return request + + def prepare_request( + self, + method: HttpMethod, + endpoint: str, + *, + query: Query | None = None, + json_body: Body | None = None, + extra_query: Query | None = None, + extra_headers: AnyMapping | None = None, + extra_body: Body | None = None, + timeout: TimeoutTypes | None = None, + files: Any | None = None, + data: Any | None = None, + ) -> _RequestModel: + return self._prepare_request( + method, + endpoint, + query=query, + json_body=json_body, + extra_query=extra_query, + extra_headers=extra_headers, + extra_body=extra_body, + timeout=timeout, + files=files, + data=data, + ) + + def _compose_headers(self, extra_headers: AnyMapping | None) -> dict[str, str]: + combined_headers: dict[str, str] = dict(self._config.headers) + if extra_headers is None: + return combined_headers + for key, value in extra_headers.items(): + combined_headers[str(key)] = str(value) + return combined_headers + + @staticmethod + def _compose_query_params( + query: Query | None, + extra_query: Query | None, + ) -> dict[str, QueryParamValue] | None: + params: dict[str, QueryParamValue] = {} + for mapping in (query, extra_query): + if mapping is None: + continue + for key, value in mapping.items(): + params[str(key)] = value + return params or None + + @staticmethod + def _compose_json_body( + json_body: Body | None, + extra_body: Body | None, + ) -> dict[str, Any] | None: + if json_body is None: + if extra_body is not None: + msg = "extra_body can only be used with JSON requests." 
+ raise PdfRestConfigurationError(msg) + return None + payload: dict[str, Any] = dict(json_body) + if extra_body is not None: + for key, value in extra_body.items(): + payload[str(key)] = value + return payload + + def _handle_response(self, response: httpx.Response) -> Any: + if response.is_success: + return self._decode_json(response) + + message, error_payload = self._extract_error_details(response) + + if response.status_code == 401: + auth_message = message or "Authentication with pdfRest failed." + raise PdfRestAuthenticationError( + response.status_code, + message=auth_message, + response_content=error_payload, + ) + + raise PdfRestApiError( + response.status_code, message=message, response_content=error_payload + ) + + def _decode_json(self, response: httpx.Response) -> Any: + try: + return response.json() + except ValueError as exc: + raise PdfRestApiError( + response.status_code, + message="Response body is not valid JSON.", + response_content=response.text, + ) from exc + + @staticmethod + def _extract_error_details( + response: httpx.Response, + ) -> tuple[str | None, Any | None]: + try: + pdfrest_error = PdfRestErrorResponse.model_validate_json(response.content) + except ValidationError: + return None, response.text + return pdfrest_error.error, None + + +class _SyncApiClient(_BaseApiClient[httpx.Client]): + """Internal synchronous client implementation.""" + + _client: httpx.Client + + def __init__( + self, + *, + api_key: str | None = None, + base_url: str | URL | None = None, + timeout: TimeoutTypes | None = None, + headers: AnyMapping | None = None, + http_client: httpx.Client | None = None, + transport: httpx.BaseTransport | None = None, + ) -> None: + super().__init__( + api_key=api_key, + base_url=base_url, + timeout=timeout, + headers=headers, + ) + self._owns_http_client = http_client is None + self._client = http_client or httpx.Client( + base_url=self.base_url, + headers=dict(self._config.headers), + timeout=self._config.timeout, + 
transport=transport, + ) + + def close(self) -> None: + if self._owns_http_client: + self._client.close() + + def __enter__(self) -> _SyncApiClient: + return self + + def __exit__(self, exc_type: Any, exc: Any, traceback: Any) -> None: + self.close() + + def _send_request(self, request: _RequestModel) -> Any: + http_client = self._client + try: + response = http_client.request( + method=request.method, + url=request.endpoint, + params=request.params or None, + headers=request.headers or None, + timeout=request.timeout, + json=request.json_body, + files=request.files, + data=request.data, + ) + except httpx.HTTPError as exc: + raise translate_httpx_error(exc) from exc + return self._handle_response(response) + + def _post_file_operation( + self, + *, + endpoint: str, + payload: dict[str, Any], + payload_model: type[BaseModel], + extra_query: Query | None = None, + extra_headers: AnyMapping | None = None, + extra_body: Body | None = None, + timeout: TimeoutTypes | None = None, + ) -> PdfRestFileBasedResponse: + job_options = payload_model.model_validate(payload) + json_body = job_options.model_dump( + mode="json", by_alias=True, exclude_none=True, exclude_unset=True + ) + request = self.prepare_request( + "POST", + endpoint, + json_body=json_body, + extra_query=extra_query, + extra_headers=extra_headers, + extra_body=extra_body, + timeout=timeout, + ) + raw_payload = self._send_request(request) + raw_response = PdfRestRawFileResponse.model_validate(raw_payload) + + output_ids = raw_response.ids or [] + output_files = [ + self.fetch_file_info( + str(file_id), + extra_query=extra_query, + extra_headers=extra_headers, + timeout=timeout, + ) + for file_id in output_ids + ] + + return PdfRestFileBasedResponse.model_validate( + { + "input_id": [str(file_id) for file_id in raw_response.input_id], + "output_file": [ + file.model_dump(mode="json", by_alias=True) for file in output_files + ], + "warning": raw_response.warning, + } + ) + + def send_request(self, request: 
_RequestModel) -> Any: + return self._send_request(request) + + def download_file( + self, + file_id: str, + *, + extra_query: Query | None = None, + extra_headers: AnyMapping | None = None, + timeout: TimeoutTypes | None = None, + ) -> httpx.Response: + request = self.prepare_request( + "GET", + f"/resource/{file_id}", + extra_query=extra_query, + extra_headers=extra_headers, + timeout=timeout, + ) + http_request = self._client.build_request( + request.method, + request.endpoint, + params=request.params or None, + headers=request.headers or None, + ) + if request.timeout is not None: + timeout_value = ( + request.timeout + if isinstance(request.timeout, httpx.Timeout) + else httpx.Timeout(request.timeout) + ) + http_request.extensions["timeout"] = timeout_value.as_dict() + try: + response = self._client.send(http_request, stream=True) + except httpx.HTTPError as exc: + raise translate_httpx_error(exc) from exc + if not response.is_success: + try: + self._handle_response(response) + finally: + response.close() + return response + + def fetch_file_info( + self, + file_id: str, + *, + extra_query: Query | None = None, + extra_headers: AnyMapping | None = None, + timeout: TimeoutTypes | None = None, + ) -> PdfRestFile: + request = self.prepare_request( + "GET", + f"/resource/{file_id}", + query={"format": "info"}, + extra_query=extra_query, + extra_headers=extra_headers, + timeout=timeout, + ) + payload = self._send_request(request) + return PdfRestFile.model_validate(payload) + + +class _AsyncApiClient(_BaseApiClient[httpx.AsyncClient]): + """Internal asynchronous client implementation.""" + + _client: httpx.AsyncClient + + def __init__( + self, + *, + api_key: str | None = None, + base_url: str | URL | None = None, + timeout: TimeoutTypes | None = None, + headers: AnyMapping | None = None, + http_client: httpx.AsyncClient | None = None, + transport: httpx.AsyncBaseTransport | None = None, + concurrency_limit: int = DEFAULT_FILE_INFO_CONCURRENCY, + ) -> None: + 
super().__init__( + api_key=api_key, + base_url=base_url, + timeout=timeout, + headers=headers, + ) + self._owns_http_client = http_client is None + self._client = http_client or httpx.AsyncClient( + base_url=self.base_url, + headers=dict(self._config.headers), + timeout=self._config.timeout, + transport=transport, + ) + self._concurrency_limit = concurrency_limit + + async def aclose(self) -> None: + if self._owns_http_client: + await self._client.aclose() + + async def __aenter__(self) -> _AsyncApiClient: + return self + + async def __aexit__(self, exc_type: Any, exc: Any, traceback: Any) -> None: + await self.aclose() + + async def _send_request(self, request: _RequestModel) -> Any: + http_client = self._client + try: + response = await http_client.request( + method=request.method, + url=request.endpoint, + params=request.params or None, + headers=request.headers or None, + timeout=request.timeout, + json=request.json_body, + files=request.files, + data=request.data, + ) + except httpx.HTTPError as exc: + raise translate_httpx_error(exc) from exc + return self._handle_response(response) + + async def _post_file_operation( + self, + *, + endpoint: str, + payload: dict[str, Any], + payload_model: type[BaseModel], + extra_query: Query | None = None, + extra_headers: AnyMapping | None = None, + extra_body: Body | None = None, + timeout: TimeoutTypes | None = None, + ) -> PdfRestFileBasedResponse: + job_options = payload_model.model_validate(payload) + request = self.prepare_request( + "POST", + endpoint, + json_body=job_options.model_dump( + mode="json", by_alias=True, exclude_none=True, exclude_unset=True + ), + extra_query=extra_query, + extra_headers=extra_headers, + extra_body=extra_body, + timeout=timeout, + ) + raw_payload = await self._send_request(request) + raw_response = PdfRestRawFileResponse.model_validate(raw_payload) + + output_ids = raw_response.ids or [] + output_files: list[PdfRestFile] = [] + semaphore = asyncio.Semaphore(self._concurrency_limit) + 
+ async def throttled_fetch_file_info(file_id: str) -> PdfRestFile: + async with semaphore: + return await self.fetch_file_info( + str(file_id), + extra_query=extra_query, + extra_headers=extra_headers, + timeout=timeout, + ) + + if output_ids: + output_files = list( + await asyncio.gather( + *(throttled_fetch_file_info(str(file_id)) for file_id in output_ids) + ) + ) + + return PdfRestFileBasedResponse.model_validate( + { + "input_id": [str(file_id) for file_id in raw_response.input_id], + "output_file": [ + file.model_dump(mode="json", by_alias=True) for file in output_files + ], + "warning": raw_response.warning, + } + ) + + async def send_request(self, request: _RequestModel) -> Any: + return await self._send_request(request) + + async def download_file( + self, + file_id: str, + *, + extra_query: Query | None = None, + extra_headers: AnyMapping | None = None, + timeout: TimeoutTypes | None = None, + ) -> httpx.Response: + request = self.prepare_request( + "GET", + f"/resource/{file_id}", + extra_query=extra_query, + extra_headers=extra_headers, + timeout=timeout, + ) + http_request = self._client.build_request( + request.method, + request.endpoint, + params=request.params or None, + headers=request.headers or None, + ) + if request.timeout is not None: + timeout_value = ( + request.timeout + if isinstance(request.timeout, httpx.Timeout) + else httpx.Timeout(request.timeout) + ) + http_request.extensions["timeout"] = timeout_value.as_dict() + try: + response = await self._client.send(http_request, stream=True) + except httpx.HTTPError as exc: + raise translate_httpx_error(exc) from exc + if not response.is_success: + try: + self._handle_response(response) + finally: + await response.aclose() + return response + + async def fetch_file_info( + self, + file_id: str, + *, + extra_query: Query | None = None, + extra_headers: AnyMapping | None = None, + timeout: TimeoutTypes | None = None, + ) -> PdfRestFile: + request = self.prepare_request( + "GET", + 
f"/resource/{file_id}", + query={"format": "info"}, + extra_query=extra_query, + extra_headers=extra_headers, + timeout=timeout, + ) + payload = await self._send_request(request) + return PdfRestFile.model_validate(payload) + + +class PdfRestFileStream: + """Streaming wrapper for synchronously downloading files from pdfRest.""" + + def __init__(self, response: httpx.Response) -> None: + self._response = response + + def iter_bytes(self, chunk_size: int | None = None) -> Iterator[bytes]: + yield from self._response.iter_bytes(chunk_size) + + def iter_text(self, chunk_size: int | None = None) -> Iterator[str]: + yield from self._response.iter_text(chunk_size) + + def iter_lines(self) -> Iterator[str]: + yield from self._response.iter_lines() + + def iter_raw(self, chunk_size: int | None = None) -> Iterator[bytes]: + yield from self._response.iter_raw(chunk_size) + + def close(self) -> None: + self._response.close() + + def __enter__(self) -> PdfRestFileStream: + return self + + def __exit__(self, exc_type: Any, exc: Any, traceback: Any) -> None: + self.close() + + +class AsyncPdfRestFileStream: + """Streaming wrapper for asynchronously downloading files from pdfRest.""" + + def __init__(self, response: httpx.Response) -> None: + self._response = response + + async def iter_bytes(self, chunk_size: int | None = None) -> AsyncIterator[bytes]: + async for chunk in self._response.aiter_bytes(chunk_size): + yield chunk + + async def iter_text(self, chunk_size: int | None = None) -> AsyncIterator[str]: + async for chunk in self._response.aiter_text(chunk_size): + yield chunk + + async def iter_lines(self) -> AsyncIterator[str]: + async for line in self._response.aiter_lines(): + yield line + + async def iter_raw(self, chunk_size: int | None = None) -> AsyncIterator[bytes]: + async for chunk in self._response.aiter_raw(chunk_size): + yield chunk + + async def close(self) -> None: + await self._response.aclose() + + async def __aenter__(self) -> AsyncPdfRestFileStream: + 
return self + + async def __aexit__(self, exc_type: Any, exc: Any, traceback: Any) -> None: + await self.close() + + +class _FilesClient: + """Expose file-related operations for the synchronous client.""" + + def __init__(self, client: _SyncApiClient) -> None: + self._client = client + + def get( + self, + id: PdfRestFileID | str, + *, + extra_query: Query | None = None, + extra_headers: AnyMapping | None = None, + timeout: TimeoutTypes | None = None, + ) -> PdfRestFile: + """Retrieve file metadata given a file identifier.""" + file_id = _normalize_file_id(id) + return self._client.fetch_file_info( + str(file_id), + extra_query=extra_query, + extra_headers=extra_headers, + timeout=timeout, + ) + + def create( + self, + files: UploadFiles, + *, + extra_query: Query | None = None, + extra_headers: AnyMapping | None = None, + timeout: TimeoutTypes | None = None, + ) -> list[PdfRestFile]: + """Upload one or more files by content. + + Provide either a single file specification or a sequence of file + specifications (each matching the shapes accepted by httpx). Every + uploaded part is sent using the field name ``file``. + """ + normalized_files = _normalize_upload_files(files) + request = self._client.prepare_request( + "POST", + "/upload", + files=normalized_files, + extra_query=extra_query, + extra_headers=extra_headers, + timeout=timeout, + ) + payload = self._client.send_request(request) + file_ids = _extract_uploaded_file_ids(payload) + return [ + self._client.fetch_file_info( + file_id, + extra_query=extra_query, + extra_headers=extra_headers, + timeout=timeout, + ) + for file_id in file_ids + ] + + def create_from_paths( + self, + file_paths: FilePathInput, + *, + extra_query: Query | None = None, + extra_headers: AnyMapping | None = None, + timeout: TimeoutTypes | None = None, + ) -> list[PdfRestFile]: + """Upload one or more files by their path. 
+ + Each entry may be a bare path-like object or a tuple of + `(path, content_type)` / `(path, content_type, headers)` where headers + mirrors the httpx multipart header mapping. All opened file handles are + closed once the request completes. + """ + normalized_paths = _normalize_path_inputs(file_paths) + + with ExitStack() as stack: + upload_specs: list[FileTypes] = [] + for spec in normalized_paths: + path, content_type, headers = _parse_path_spec(spec) + file_obj = stack.enter_context(path.open("rb")) + filename = path.name + if headers: + upload_specs.append((filename, file_obj, content_type, headers)) + elif content_type is not None: + upload_specs.append((filename, file_obj, content_type)) + else: + upload_specs.append((filename, file_obj)) + return self.create( + upload_specs, + extra_query=extra_query, + extra_headers=extra_headers, + timeout=timeout, + ) + + def create_from_urls( + self, + urls: UrlInput, + *, + extra_query: Query | None = None, + extra_headers: AnyMapping | None = None, + extra_body: Body | None = None, + timeout: TimeoutTypes | None = None, + ) -> list[PdfRestFile]: + """Upload one or more files by providing remote URLs.""" + + normalized_urls = UploadURLs.model_validate({"url": urls}) # pyright: ignore[reportPrivateUsage] + request = self._client.prepare_request( + "POST", + "/upload", + json_body=normalized_urls.model_dump(mode="json"), + extra_query=extra_query, + extra_headers=extra_headers, + extra_body=extra_body, + timeout=timeout, + ) + payload = self._client.send_request(request) + file_ids = _extract_uploaded_file_ids(payload) + return [ + self._client.fetch_file_info( + file_id, + extra_query=extra_query, + extra_headers=extra_headers, + timeout=timeout, + ) + for file_id in file_ids + ] + + def read_bytes( + self, + file_ref: PdfRestFile | str, + *, + extra_query: Query | None = None, + extra_headers: AnyMapping | None = None, + timeout: TimeoutTypes | None = None, + ) -> bytes: + response = self._client.download_file( + 
_resolve_file_id(file_ref), + extra_query=extra_query, + extra_headers=extra_headers, + timeout=timeout, + ) + try: + return response.read() + finally: + response.close() + + def read_text( + self, + file_ref: PdfRestFile | str, + *, + encoding: str = "utf-8", + extra_query: Query | None = None, + extra_headers: AnyMapping | None = None, + timeout: TimeoutTypes | None = None, + ) -> str: + response = self._client.download_file( + _resolve_file_id(file_ref), + extra_query=extra_query, + extra_headers=extra_headers, + timeout=timeout, + ) + try: + response.encoding = encoding + data = response.read() + codec = response.encoding or encoding or "utf-8" + return data.decode(codec) + finally: + response.close() + + def read_json( + self, + file_ref: PdfRestFile | str, + *, + extra_query: Query | None = None, + extra_headers: AnyMapping | None = None, + timeout: TimeoutTypes | None = None, + ) -> Any: + response = self._client.download_file( + _resolve_file_id(file_ref), + extra_query=extra_query, + extra_headers=extra_headers, + timeout=timeout, + ) + try: + data = response.read() + codec = response.encoding or "utf-8" + return json.loads(data.decode(codec)) + finally: + response.close() + + def write_bytes( + self, + file_ref: PdfRestFile | str, + destination: DestinationPath, + *, + extra_query: Query | None = None, + extra_headers: AnyMapping | None = None, + timeout: TimeoutTypes | None = None, + ) -> Path: + response = self._client.download_file( + _resolve_file_id(file_ref), + extra_query=extra_query, + extra_headers=extra_headers, + timeout=timeout, + ) + path = Path(destination) + try: + with path.open("wb") as file_handle: + for chunk in response.iter_bytes(): + file_handle.write(chunk) + finally: + response.close() + return path + + def stream( + self, + file_ref: PdfRestFile | str, + *, + extra_query: Query | None = None, + extra_headers: AnyMapping | None = None, + timeout: TimeoutTypes | None = None, + ) -> PdfRestFileStream: + response = 
self._client.download_file( + _resolve_file_id(file_ref), + extra_query=extra_query, + extra_headers=extra_headers, + timeout=timeout, + ) + return PdfRestFileStream(response) + + +class _AsyncFilesClient: + """Expose file-related operations for the asynchronous client.""" + + def __init__( + self, + client: _AsyncApiClient, + *, + concurrency_limit: int = DEFAULT_FILE_INFO_CONCURRENCY, + ) -> None: + self._client = client + self._concurrency_limit = concurrency_limit + + async def get( + self, + id: PdfRestFileID | str, + *, + extra_query: Query | None = None, + extra_headers: AnyMapping | None = None, + timeout: TimeoutTypes | None = None, + ) -> PdfRestFile: + """Retrieve file metadata given a file identifier.""" + file_id = _normalize_file_id(id) + return await self._client.fetch_file_info( + str(file_id), + extra_query=extra_query, + extra_headers=extra_headers, + timeout=timeout, + ) + + async def create( + self, + files: UploadFiles, + *, + extra_query: Query | None = None, + extra_headers: AnyMapping | None = None, + timeout: TimeoutTypes | None = None, + ) -> list[PdfRestFile]: + """Upload one or more files by content. + + Provide either a single file specification or a sequence of file + specifications (each matching the shapes accepted by httpx). Every + uploaded part is sent using the field name ``file``. 
+ """ + normalized_files = _normalize_upload_files(files) + request = self._client.prepare_request( + "POST", + "/upload", + files=normalized_files, + extra_query=extra_query, + extra_headers=extra_headers, + timeout=timeout, + ) + payload = await self._client.send_request(request) + file_ids = _extract_uploaded_file_ids(payload) + semaphore = asyncio.Semaphore(self._concurrency_limit) + + async def fetch(file_id: str) -> PdfRestFile: + async with semaphore: + return await self._client.fetch_file_info( + file_id, + extra_query=extra_query, + extra_headers=extra_headers, + timeout=timeout, + ) + + return await asyncio.gather(*(fetch(file_id) for file_id in file_ids)) + + async def create_from_paths( + self, + file_paths: FilePathInput, + *, + extra_query: Query | None = None, + extra_headers: AnyMapping | None = None, + timeout: TimeoutTypes | None = None, + ) -> list[PdfRestFile]: + """Upload one or more files by their path. + + Each entry may be a bare path-like object or a tuple of + `(path, content_type)` / `(path, content_type, headers)` where headers + mirrors the httpx multipart header mapping. All opened file handles are + closed once the request completes. 
+ """ + normalized_paths = _normalize_path_inputs(file_paths) + + with ExitStack() as stack: + upload_specs: list[FileTypes] = [] + for spec in normalized_paths: + path, content_type, headers = _parse_path_spec(spec) + file_obj = stack.enter_context(path.open("rb")) + filename = path.name + if headers: + upload_specs.append((filename, file_obj, content_type, headers)) + elif content_type is not None: + upload_specs.append((filename, file_obj, content_type)) + else: + upload_specs.append((filename, file_obj)) + return await self.create( + upload_specs, + extra_query=extra_query, + extra_headers=extra_headers, + timeout=timeout, + ) + + async def create_from_urls( + self, + urls: UrlInput, + *, + extra_query: Query | None = None, + extra_headers: AnyMapping | None = None, + extra_body: Body | None = None, + timeout: TimeoutTypes | None = None, + ) -> list[PdfRestFile]: + """Upload one or more files by providing remote URLs.""" + + normalized_urls = UploadURLs.model_validate({"url": urls}) + request = self._client.prepare_request( + "POST", + "/upload", + json_body=normalized_urls.model_dump(mode="json"), + extra_query=extra_query, + extra_headers=extra_headers, + extra_body=extra_body, + timeout=timeout, + ) + payload = await self._client.send_request(request) + file_ids = _extract_uploaded_file_ids(payload) + semaphore = asyncio.Semaphore(self._concurrency_limit) + + async def fetch(file_id: str) -> PdfRestFile: + async with semaphore: + return await self._client.fetch_file_info( + file_id, + extra_query=extra_query, + extra_headers=extra_headers, + timeout=timeout, + ) + + return await asyncio.gather(*(fetch(file_id) for file_id in file_ids)) + + async def read_bytes( + self, + file_ref: PdfRestFile | str, + *, + extra_query: Query | None = None, + extra_headers: AnyMapping | None = None, + timeout: TimeoutTypes | None = None, + ) -> bytes: + response = await self._client.download_file( + _resolve_file_id(file_ref), + extra_query=extra_query, + 
extra_headers=extra_headers, + timeout=timeout, + ) + try: + return await response.aread() + finally: + await response.aclose() + + async def read_text( + self, + file_ref: PdfRestFile | str, + *, + encoding: str = "utf-8", + extra_query: Query | None = None, + extra_headers: AnyMapping | None = None, + timeout: TimeoutTypes | None = None, + ) -> str: + response = await self._client.download_file( + _resolve_file_id(file_ref), + extra_query=extra_query, + extra_headers=extra_headers, + timeout=timeout, + ) + try: + response.encoding = encoding + data = await response.aread() + codec = response.encoding or encoding or "utf-8" + return data.decode(codec) + finally: + await response.aclose() + + async def read_json( + self, + file_ref: PdfRestFile | str, + *, + extra_query: Query | None = None, + extra_headers: AnyMapping | None = None, + timeout: TimeoutTypes | None = None, + ) -> Any: + response = await self._client.download_file( + _resolve_file_id(file_ref), + extra_query=extra_query, + extra_headers=extra_headers, + timeout=timeout, + ) + try: + data = await response.aread() + codec = response.encoding or "utf-8" + return json.loads(data.decode(codec)) + finally: + await response.aclose() + + async def write_bytes( + self, + file_ref: PdfRestFile | str, + destination: DestinationPath, + *, + extra_query: Query | None = None, + extra_headers: AnyMapping | None = None, + timeout: TimeoutTypes | None = None, + ) -> Path: + response = await self._client.download_file( + _resolve_file_id(file_ref), + extra_query=extra_query, + extra_headers=extra_headers, + timeout=timeout, + ) + path = Path(destination) + try: + with path.open("wb") as file_handle: + async for chunk in response.aiter_bytes(): + file_handle.write(chunk) + finally: + await response.aclose() + return path + + async def stream( + self, + file_ref: PdfRestFile | str, + *, + extra_query: Query | None = None, + extra_headers: AnyMapping | None = None, + timeout: TimeoutTypes | None = None, + ) -> 
AsyncPdfRestFileStream: + response = await self._client.download_file( + _resolve_file_id(file_ref), + extra_query=extra_query, + extra_headers=extra_headers, + timeout=timeout, + ) + return AsyncPdfRestFileStream(response) + + +class PdfRestClient(_SyncApiClient): + """Synchronous client for interacting with the pdfrest API.""" + + def __init__( + self, + *, + api_key: str | None = None, + base_url: str | URL | None = None, + timeout: TimeoutTypes | None = None, + headers: AnyMapping | None = None, + http_client: httpx.Client | None = None, + transport: httpx.BaseTransport | None = None, + ) -> None: + """Create a synchronous pdfRest client.""" + + super().__init__( + api_key=api_key, + base_url=base_url, + timeout=timeout, + headers=headers, + http_client=http_client, + transport=transport, + ) + self._files_client = _FilesClient(self) + + def __enter__(self) -> PdfRestClient: + super().__enter__() + return self + + def __exit__(self, exc_type: Any, exc: Any, traceback: Any) -> None: + super().__exit__(exc_type, exc, traceback) + + @property + def files(self) -> _FilesClient: + return self._files_client + + def up( + self, + *, + extra_headers: AnyMapping | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: TimeoutTypes | None = None, + ) -> UpResponse: + """Call the `/up` health endpoint and return server metadata.""" + + request = self._prepare_request( + "GET", + "/up", + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + ) + payload = self._send_request(request) + return UpResponse.model_validate(payload) + + def _convert_to_graphic( + self, + *, + endpoint: str, + payload: dict[str, Any], + payload_model: type[BasePdfRestGraphicPayload[Any]], + extra_query: Query | None = None, + extra_headers: AnyMapping | None = None, + extra_body: Body | None = None, + timeout: TimeoutTypes | None = None, + ) -> PdfRestFileBasedResponse: + return self._post_file_operation( + 
endpoint=endpoint, + payload=payload, + payload_model=payload_model, + extra_query=extra_query, + extra_headers=extra_headers, + extra_body=extra_body, + timeout=timeout, + ) + + def query_pdf_info( + self, + file: PdfRestFile | Sequence[PdfRestFile], + *, + queries: Sequence[PdfInfoQuery] | PdfInfoQuery = ALL_PDF_INFO_QUERIES, + extra_query: Query | None = None, + extra_headers: AnyMapping | None = None, + extra_body: Body | None = None, + timeout: TimeoutTypes | None = None, + ) -> PdfRestInfoResponse: + """Query pdfRest for metadata describing a PDF document.""" + + payload = PdfInfoPayload.model_validate({"file": file, "queries": queries}) + request = self.prepare_request( + "POST", + "/pdf-info", + json_body=payload.model_dump( + mode="json", by_alias=True, exclude_none=True, exclude_defaults=True + ), + extra_query=extra_query, + extra_headers=extra_headers, + extra_body=extra_body, + timeout=timeout, + ) + raw_payload = self._send_request(request) + return PdfRestInfoResponse.model_validate(raw_payload) + + def preview_redactions( + self, + file: PdfRestFile | Sequence[PdfRestFile], + *, + redactions: PdfRedactionInstruction | Sequence[PdfRedactionInstruction], + output: str | None = None, + extra_query: Query | None = None, + extra_headers: AnyMapping | None = None, + extra_body: Body | None = None, + timeout: TimeoutTypes | None = None, + ) -> PdfRestFileBasedResponse: + """Generate a PDF redaction preview with annotated redaction rectangles.""" + + payload: dict[str, Any] = { + "files": file, + "redactions": redactions, + } + if output is not None: + payload["output"] = output + + return self._post_file_operation( + endpoint="/pdf-with-redacted-text-preview", + payload=payload, + payload_model=PdfRedactionPreviewPayload, + extra_query=extra_query, + extra_headers=extra_headers, + extra_body=extra_body, + timeout=timeout, + ) + + def apply_redactions( + self, + file: PdfRestFile | Sequence[PdfRestFile], + *, + rgb_color: PdfRGBColor | Sequence[int] | None 
= None, + output: str | None = None, + extra_query: Query | None = None, + extra_headers: AnyMapping | None = None, + extra_body: Body | None = None, + timeout: TimeoutTypes | None = None, + ) -> PdfRestFileBasedResponse: + """Apply previously previewed redactions and return the final redacted PDF.""" + + payload: dict[str, Any] = { + "files": file, + } + if rgb_color is not None: + payload["rgb_color"] = rgb_color + if output is not None: + payload["output"] = output + + return self._post_file_operation( + endpoint="/pdf-with-redacted-text-applied", + payload=payload, + payload_model=PdfRedactionApplyPayload, + extra_query=extra_query, + extra_headers=extra_headers, + extra_body=extra_body, + timeout=timeout, + ) + + def split_pdf( + self, + file: PdfRestFile | Sequence[PdfRestFile], + *, + page_groups: Sequence[PdfPageSelection] | PdfPageSelection | None = None, + output_prefix: str | None = None, + extra_query: Query | None = None, + extra_headers: AnyMapping | None = None, + extra_body: Body | None = None, + timeout: TimeoutTypes | None = None, + ) -> PdfRestFileBasedResponse: + """Split a PDF into one or more PDF files based on the provided page groups.""" + + payload: dict[str, Any] = {"files": file} + if page_groups is not None: + payload["page_groups"] = page_groups + if output_prefix is not None: + payload["output_prefix"] = output_prefix + + return self._post_file_operation( + endpoint="/split-pdf", + payload=payload, + payload_model=PdfSplitPayload, + extra_query=extra_query, + extra_headers=extra_headers, + extra_body=extra_body, + timeout=timeout, + ) + + def merge_pdfs( + self, + sources: Sequence[PdfMergeInput], + *, + output_prefix: str | None = None, + extra_query: Query | None = None, + extra_headers: AnyMapping | None = None, + extra_body: Body | None = None, + timeout: TimeoutTypes | None = None, + ) -> PdfRestFileBasedResponse: + """Merge multiple PDFs (or page subsets) into a single PDF file.""" + + payload: dict[str, Any] = {"sources": 
sources} + if output_prefix is not None: + payload["output_prefix"] = output_prefix + + return self._post_file_operation( + endpoint="/merged-pdf", + payload=payload, + payload_model=PdfMergePayload, + extra_query=extra_query, + extra_headers=extra_headers, + extra_body=extra_body, + timeout=timeout, + ) + + def convert_to_png( + self, + files: PdfRestFile | Sequence[PdfRestFile], + *, + output_prefix: str | None = None, + page_range: str | Sequence[str] | None = None, + resolution: int = 300, + color_model: Literal["rgb", "rgba", "gray"] = "rgb", + smoothing: Literal["none", "all", "text", "line", "image"] + | Sequence[Literal["none", "all", "text", "line", "image"]] + | None = None, + extra_query: Query | None = None, + extra_headers: AnyMapping | None = None, + extra_body: Body | None = None, + timeout: TimeoutTypes | None = None, + ) -> PdfRestFileBasedResponse: + """Convert one or more pdfRest files to PNG images.""" + + payload: dict[str, Any] = { + "files": files, + "resolution": resolution, + "color_model": color_model, + } + if output_prefix is not None: + payload["output_prefix"] = output_prefix + if page_range is not None: + payload["page_range"] = page_range + if smoothing is not None: + payload["smoothing"] = smoothing + + return self._convert_to_graphic( + endpoint="/png", + payload=payload, + payload_model=PngPdfRestPayload, + extra_query=extra_query, + extra_headers=extra_headers, + extra_body=extra_body, + timeout=timeout, + ) + + def convert_to_bmp( + self, + files: PdfRestFile | Sequence[PdfRestFile], + *, + output_prefix: str | None = None, + page_range: str | Sequence[str] | None = None, + resolution: int = 300, + color_model: Literal["rgb", "gray"] = "rgb", + smoothing: Literal["none", "all", "text", "line", "image"] + | Sequence[Literal["none", "all", "text", "line", "image"]] + | None = None, + extra_query: Query | None = None, + extra_headers: AnyMapping | None = None, + extra_body: Body | None = None, + timeout: TimeoutTypes | None = None, 
+ ) -> PdfRestFileBasedResponse: + """Convert one or more pdfRest files to BMP images.""" + + payload: dict[str, Any] = { + "files": files, + "resolution": resolution, + "color_model": color_model, + } + if output_prefix is not None: + payload["output_prefix"] = output_prefix + if page_range is not None: + payload["page_range"] = page_range + if smoothing is not None: + payload["smoothing"] = smoothing + + return self._convert_to_graphic( + endpoint="/bmp", + payload=payload, + payload_model=BmpPdfRestPayload, + extra_query=extra_query, + extra_headers=extra_headers, + extra_body=extra_body, + timeout=timeout, + ) + + def convert_to_gif( + self, + files: PdfRestFile | Sequence[PdfRestFile], + *, + output_prefix: str | None = None, + page_range: str | Sequence[str] | None = None, + resolution: int = 300, + color_model: Literal["rgb", "gray"] = "rgb", + smoothing: Literal["none", "all", "text", "line", "image"] + | Sequence[Literal["none", "all", "text", "line", "image"]] + | None = None, + extra_query: Query | None = None, + extra_headers: AnyMapping | None = None, + extra_body: Body | None = None, + timeout: TimeoutTypes | None = None, + ) -> PdfRestFileBasedResponse: + """Convert one or more pdfRest files to GIF images.""" + + payload: dict[str, Any] = { + "files": files, + "resolution": resolution, + "color_model": color_model, + } + if output_prefix is not None: + payload["output_prefix"] = output_prefix + if page_range is not None: + payload["page_range"] = page_range + if smoothing is not None: + payload["smoothing"] = smoothing + + return self._convert_to_graphic( + endpoint="/gif", + payload=payload, + payload_model=GifPdfRestPayload, + extra_query=extra_query, + extra_headers=extra_headers, + extra_body=extra_body, + timeout=timeout, + ) + + def convert_to_jpeg( + self, + files: PdfRestFile | Sequence[PdfRestFile], + *, + output_prefix: str | None = None, + page_range: str | Sequence[str] | None = None, + resolution: int = 300, + color_model: Literal["rgb", 
"cmyk", "gray"] = "rgb", + smoothing: Literal["none", "all", "text", "line", "image"] + | Sequence[Literal["none", "all", "text", "line", "image"]] + | None = None, + jpeg_quality: int | None = None, + extra_query: Query | None = None, + extra_headers: AnyMapping | None = None, + extra_body: Body | None = None, + timeout: TimeoutTypes | None = None, + ) -> PdfRestFileBasedResponse: + """Convert one or more pdfRest files to JPEG images.""" + + payload: dict[str, Any] = { + "files": files, + "resolution": resolution, + "color_model": color_model, + } + if output_prefix is not None: + payload["output_prefix"] = output_prefix + if page_range is not None: + payload["page_range"] = page_range + if smoothing is not None: + payload["smoothing"] = smoothing + if jpeg_quality is not None: + payload["jpeg_quality"] = jpeg_quality + + return self._convert_to_graphic( + endpoint="/jpg", + payload=payload, + payload_model=JpegPdfRestPayload, + extra_query=extra_query, + extra_headers=extra_headers, + extra_body=extra_body, + timeout=timeout, + ) + + def convert_to_tiff( + self, + files: PdfRestFile | Sequence[PdfRestFile], + *, + output_prefix: str | None = None, + page_range: str | Sequence[str] | None = None, + resolution: int = 300, + color_model: Literal["rgb", "rgba", "cmyk", "lab", "gray"] = "rgb", + smoothing: Literal["none", "all", "text", "line", "image"] + | Sequence[Literal["none", "all", "text", "line", "image"]] + | None = None, + extra_query: Query | None = None, + extra_headers: AnyMapping | None = None, + extra_body: Body | None = None, + timeout: TimeoutTypes | None = None, + ) -> PdfRestFileBasedResponse: + """Convert one or more pdfRest files to TIFF images.""" + + payload: dict[str, Any] = { + "files": files, + "resolution": resolution, + "color_model": color_model, + } + if output_prefix is not None: + payload["output_prefix"] = output_prefix + if page_range is not None: + payload["page_range"] = page_range + if smoothing is not None: + payload["smoothing"] 
= smoothing + + return self._convert_to_graphic( + endpoint="/tif", + payload=payload, + payload_model=TiffPdfRestPayload, + extra_query=extra_query, + extra_headers=extra_headers, + extra_body=extra_body, + timeout=timeout, + ) + + +class AsyncPdfRestClient(_AsyncApiClient): + """Asynchronous client for interacting with the pdfrest API.""" + + def __init__( + self, + *, + api_key: str | None = None, + base_url: str | URL | None = None, + timeout: TimeoutTypes | None = None, + headers: AnyMapping | None = None, + http_client: httpx.AsyncClient | None = None, + transport: httpx.AsyncBaseTransport | None = None, + concurrency_limit: int = DEFAULT_FILE_INFO_CONCURRENCY, + ) -> None: + """Create an asynchronous pdfRest client.""" + + super().__init__( + api_key=api_key, + base_url=base_url, + timeout=timeout, + headers=headers, + http_client=http_client, + transport=transport, + concurrency_limit=concurrency_limit, + ) + self._files_client = _AsyncFilesClient(self) + + async def __aenter__(self) -> AsyncPdfRestClient: + await super().__aenter__() + return self + + async def __aexit__(self, exc_type: Any, exc: Any, traceback: Any) -> None: + await super().__aexit__(exc_type, exc, traceback) + + @property + def files(self) -> _AsyncFilesClient: + return self._files_client + + async def query_pdf_info( + self, + file: PdfRestFile | Sequence[PdfRestFile], + *, + queries: Sequence[PdfInfoQuery] | PdfInfoQuery = ALL_PDF_INFO_QUERIES, + extra_query: Query | None = None, + extra_headers: AnyMapping | None = None, + extra_body: Body | None = None, + timeout: TimeoutTypes | None = None, + ) -> PdfRestInfoResponse: + """Query pdfRest for metadata describing a PDF document asynchronously.""" + + payload = PdfInfoPayload.model_validate({"file": file, "queries": queries}) + request = self.prepare_request( + "POST", + "/pdf-info", + json_body=payload.model_dump( + mode="json", by_alias=True, exclude_none=True, exclude_defaults=True + ), + extra_query=extra_query, + 
extra_headers=extra_headers, + extra_body=extra_body, + timeout=timeout, + ) + raw_payload = await self._send_request(request) + return PdfRestInfoResponse.model_validate(raw_payload) + + async def preview_redactions( + self, + file: PdfRestFile | Sequence[PdfRestFile], + *, + redactions: PdfRedactionInstruction | Sequence[PdfRedactionInstruction], + output: str | None = None, + extra_query: Query | None = None, + extra_headers: AnyMapping | None = None, + extra_body: Body | None = None, + timeout: TimeoutTypes | None = None, + ) -> PdfRestFileBasedResponse: + """Asynchronously generate a PDF redaction preview.""" + + payload: dict[str, Any] = { + "files": file, + "redactions": redactions, + } + if output is not None: + payload["output"] = output + + return await self._post_file_operation( + endpoint="/pdf-with-redacted-text-preview", + payload=payload, + payload_model=PdfRedactionPreviewPayload, + extra_query=extra_query, + extra_headers=extra_headers, + extra_body=extra_body, + timeout=timeout, + ) + + async def apply_redactions( + self, + file: PdfRestFile | Sequence[PdfRestFile], + *, + rgb_color: PdfRGBColor | Sequence[int] | None = None, + output: str | None = None, + extra_query: Query | None = None, + extra_headers: AnyMapping | None = None, + extra_body: Body | None = None, + timeout: TimeoutTypes | None = None, + ) -> PdfRestFileBasedResponse: + """Asynchronously apply PDF redactions.""" + + payload: dict[str, Any] = { + "files": file, + } + if rgb_color is not None: + payload["rgb_color"] = rgb_color + if output is not None: + payload["output"] = output + + return await self._post_file_operation( + endpoint="/pdf-with-redacted-text-applied", + payload=payload, + payload_model=PdfRedactionApplyPayload, + extra_query=extra_query, + extra_headers=extra_headers, + extra_body=extra_body, + timeout=timeout, + ) + + async def up( + self, + *, + extra_headers: AnyMapping | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + 
timeout: TimeoutTypes | None = None, + ) -> UpResponse: + """Call the `/up` health endpoint asynchronously and return server metadata.""" + + request = self._prepare_request( + "GET", + "/up", + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + ) + payload = await self._send_request(request) + return UpResponse.model_validate(payload) + + async def _convert_to_graphic( + self, + *, + endpoint: str, + payload: dict[str, Any], + payload_model: type[BasePdfRestGraphicPayload[Any]], + extra_query: Query | None = None, + extra_headers: AnyMapping | None = None, + extra_body: Body | None = None, + timeout: TimeoutTypes | None = None, + ) -> PdfRestFileBasedResponse: + return await self._post_file_operation( + endpoint=endpoint, + payload=payload, + payload_model=payload_model, + extra_query=extra_query, + extra_headers=extra_headers, + extra_body=extra_body, + timeout=timeout, + ) + + async def split_pdf( + self, + file: PdfRestFile | Sequence[PdfRestFile], + *, + page_groups: Sequence[PdfPageSelection] | PdfPageSelection | None = None, + output_prefix: str | None = None, + extra_query: Query | None = None, + extra_headers: AnyMapping | None = None, + extra_body: Body | None = None, + timeout: TimeoutTypes | None = None, + ) -> PdfRestFileBasedResponse: + """Asynchronously split a PDF into one or more PDF files.""" + + payload: dict[str, Any] = {"files": file} + if page_groups is not None: + payload["page_groups"] = page_groups + if output_prefix is not None: + payload["output_prefix"] = output_prefix + + return await self._post_file_operation( + endpoint="/split-pdf", + payload=payload, + payload_model=PdfSplitPayload, + extra_query=extra_query, + extra_headers=extra_headers, + extra_body=extra_body, + timeout=timeout, + ) + + async def merge_pdfs( + self, + sources: Sequence[PdfMergeInput], + *, + output_prefix: str | None = None, + extra_query: Query | None = None, + extra_headers: AnyMapping | None = None, + 
extra_body: Body | None = None, + timeout: TimeoutTypes | None = None, + ) -> PdfRestFileBasedResponse: + """Asynchronously merge multiple PDFs (or page subsets) into a single PDF.""" + + payload: dict[str, Any] = {"sources": sources} + if output_prefix is not None: + payload["output_prefix"] = output_prefix + + return await self._post_file_operation( + endpoint="/merged-pdf", + payload=payload, + payload_model=PdfMergePayload, + extra_query=extra_query, + extra_headers=extra_headers, + extra_body=extra_body, + timeout=timeout, + ) + + async def convert_to_png( + self, + files: PdfRestFile | Sequence[PdfRestFile], + *, + output_prefix: str | None = None, + page_range: str | Sequence[str] | None = None, + resolution: int = 300, + color_model: Literal["rgb", "rgba", "gray"] = "rgb", + smoothing: Literal["none", "all", "text", "line", "image"] + | Sequence[Literal["none", "all", "text", "line", "image"]] + | None = None, + extra_query: Query | None = None, + extra_headers: AnyMapping | None = None, + extra_body: Body | None = None, + timeout: TimeoutTypes | None = None, + ) -> PdfRestFileBasedResponse: + """Asynchronously convert one or more pdfRest files to PNG images.""" + + payload: dict[str, Any] = { + "files": files, + "resolution": resolution, + "color_model": color_model, + } + if output_prefix is not None: + payload["output_prefix"] = output_prefix + if page_range is not None: + payload["page_range"] = page_range + if smoothing is not None: + payload["smoothing"] = smoothing + + return await self._convert_to_graphic( + endpoint="/png", + payload=payload, + payload_model=PngPdfRestPayload, + extra_query=extra_query, + extra_headers=extra_headers, + extra_body=extra_body, + timeout=timeout, + ) + + async def convert_to_bmp( + self, + files: PdfRestFile | Sequence[PdfRestFile], + *, + output_prefix: str | None = None, + page_range: str | Sequence[str] | None = None, + resolution: int = 300, + color_model: Literal["rgb", "gray"] = "rgb", + smoothing: 
Literal["none", "all", "text", "line", "image"] + | Sequence[Literal["none", "all", "text", "line", "image"]] + | None = None, + extra_query: Query | None = None, + extra_headers: AnyMapping | None = None, + extra_body: Body | None = None, + timeout: TimeoutTypes | None = None, + ) -> PdfRestFileBasedResponse: + """Asynchronously convert one or more pdfRest files to BMP images.""" + + payload: dict[str, Any] = { + "files": files, + "resolution": resolution, + "color_model": color_model, + } + if output_prefix is not None: + payload["output_prefix"] = output_prefix + if page_range is not None: + payload["page_range"] = page_range + if smoothing is not None: + payload["smoothing"] = smoothing + + return await self._convert_to_graphic( + endpoint="/bmp", + payload=payload, + payload_model=BmpPdfRestPayload, + extra_query=extra_query, + extra_headers=extra_headers, + extra_body=extra_body, + timeout=timeout, + ) + + async def convert_to_gif( + self, + files: PdfRestFile | Sequence[PdfRestFile], + *, + output_prefix: str | None = None, + page_range: str | Sequence[str] | None = None, + resolution: int = 300, + color_model: Literal["rgb", "gray"] = "rgb", + smoothing: Literal["none", "all", "text", "line", "image"] + | Sequence[Literal["none", "all", "text", "line", "image"]] + | None = None, + extra_query: Query | None = None, + extra_headers: AnyMapping | None = None, + extra_body: Body | None = None, + timeout: TimeoutTypes | None = None, + ) -> PdfRestFileBasedResponse: + """Asynchronously convert one or more pdfRest files to GIF images.""" + + payload: dict[str, Any] = { + "files": files, + "resolution": resolution, + "color_model": color_model, + } + if output_prefix is not None: + payload["output_prefix"] = output_prefix + if page_range is not None: + payload["page_range"] = page_range + if smoothing is not None: + payload["smoothing"] = smoothing + + return await self._convert_to_graphic( + endpoint="/gif", + payload=payload, + payload_model=GifPdfRestPayload, + 
extra_query=extra_query, + extra_headers=extra_headers, + extra_body=extra_body, + timeout=timeout, + ) + + async def convert_to_jpeg( + self, + files: PdfRestFile | Sequence[PdfRestFile], + *, + output_prefix: str | None = None, + page_range: str | Sequence[str] | None = None, + resolution: int = 300, + color_model: Literal["rgb", "cmyk", "gray"] = "rgb", + smoothing: Literal["none", "all", "text", "line", "image"] + | Sequence[Literal["none", "all", "text", "line", "image"]] + | None = None, + jpeg_quality: int | None = None, + extra_query: Query | None = None, + extra_headers: AnyMapping | None = None, + extra_body: Body | None = None, + timeout: TimeoutTypes | None = None, + ) -> PdfRestFileBasedResponse: + """Asynchronously convert one or more pdfRest files to JPEG images.""" + + payload: dict[str, Any] = { + "files": files, + "resolution": resolution, + "color_model": color_model, + } + if output_prefix is not None: + payload["output_prefix"] = output_prefix + if page_range is not None: + payload["page_range"] = page_range + if smoothing is not None: + payload["smoothing"] = smoothing + if jpeg_quality is not None: + payload["jpeg_quality"] = jpeg_quality + + return await self._convert_to_graphic( + endpoint="/jpg", + payload=payload, + payload_model=JpegPdfRestPayload, + extra_query=extra_query, + extra_headers=extra_headers, + extra_body=extra_body, + timeout=timeout, + ) + + async def convert_to_tiff( + self, + files: PdfRestFile | Sequence[PdfRestFile], + *, + output_prefix: str | None = None, + page_range: str | Sequence[str] | None = None, + resolution: int = 300, + color_model: Literal["rgb", "rgba", "cmyk", "lab", "gray"] = "rgb", + smoothing: Literal["none", "all", "text", "line", "image"] + | Sequence[Literal["none", "all", "text", "line", "image"]] + | None = None, + extra_query: Query | None = None, + extra_headers: AnyMapping | None = None, + extra_body: Body | None = None, + timeout: TimeoutTypes | None = None, + ) -> PdfRestFileBasedResponse: + 
"""Asynchronously convert one or more pdfRest files to TIFF images.""" + + payload: dict[str, Any] = { + "files": files, + "resolution": resolution, + "color_model": color_model, + } + if output_prefix is not None: + payload["output_prefix"] = output_prefix + if page_range is not None: + payload["page_range"] = page_range + if smoothing is not None: + payload["smoothing"] = smoothing + + return await self._convert_to_graphic( + endpoint="/tif", + payload=payload, + payload_model=TiffPdfRestPayload, + extra_query=extra_query, + extra_headers=extra_headers, + extra_body=extra_body, + timeout=timeout, + ) diff --git a/src/pdfrest/exceptions.py b/src/pdfrest/exceptions.py new file mode 100644 index 00000000..d08556e1 --- /dev/null +++ b/src/pdfrest/exceptions.py @@ -0,0 +1,79 @@ +"""Library-specific exception types for the pdfrest client.""" + +from __future__ import annotations + +from typing import Any + +import httpx + +__all__ = ( + "PdfRestApiError", + "PdfRestAuthenticationError", + "PdfRestConfigurationError", + "PdfRestError", + "PdfRestRequestError", + "PdfRestTimeoutError", + "PdfRestTransportError", + "translate_httpx_error", +) + + +class PdfRestError(Exception): + """Base exception for all pdfrest client errors.""" + + +class PdfRestConfigurationError(PdfRestError): + """Raised when the client is misconfigured (for example, missing API key).""" + + +class PdfRestTimeoutError(PdfRestError): + """Raised when a request to pdfrest exceeds the configured timeout.""" + + +class PdfRestTransportError(PdfRestError): + """Raised when a transport-level error occurs while communicating with pdfrest.""" + + +class PdfRestRequestError(PdfRestError): + """Raised when the request fails before receiving a response.""" + + +class PdfRestApiError(PdfRestError): + """Raised when the pdfrest API returns a non-successful response.""" + + def __init__( + self, + status_code: int, + message: str | None = None, + response_content: Any | None = None, + ) -> None: + 
self.status_code = status_code + self.response_content = response_content + detail = message or f"pdfRest API returned status code {status_code}" + super().__init__(detail) + + def __str__(self) -> str: # pragma: no cover - mirrors Exception.__str__ + base = super().__str__() + if self.response_content is None: + return base + return f"{base}: {self.response_content}" + + +class PdfRestAuthenticationError(PdfRestApiError): + """Raised when authentication with the pdfRest API fails.""" + + +def translate_httpx_error(exc: httpx.HTTPError) -> PdfRestError: + """Convert an httpx exception into a library-specific exception.""" + + if isinstance(exc, httpx.TimeoutException): + return PdfRestTimeoutError( + str(exc) or "Request timed out while calling pdfRest." + ) + if isinstance(exc, httpx.TransportError): + return PdfRestTransportError( + str(exc) or "Transport-level error while calling pdfRest." + ) + return PdfRestRequestError( + str(exc) or "Request failed before receiving a response from pdfRest." 
+ ) diff --git a/src/pdfrest/models/__init__.py b/src/pdfrest/models/__init__.py new file mode 100644 index 00000000..e88b2f3d --- /dev/null +++ b/src/pdfrest/models/__init__.py @@ -0,0 +1,17 @@ +from .public import ( + PdfRestErrorResponse, + PdfRestFile, + PdfRestFileBasedResponse, + PdfRestFileID, + PdfRestInfoResponse, + UpResponse, +) + +__all__ = [ + "PdfRestErrorResponse", + "PdfRestFile", + "PdfRestFileBasedResponse", + "PdfRestFileID", + "PdfRestInfoResponse", + "UpResponse", +] diff --git a/src/pdfrest/models/_internal.py b/src/pdfrest/models/_internal.py new file mode 100644 index 00000000..887bc4ad --- /dev/null +++ b/src/pdfrest/models/_internal.py @@ -0,0 +1,574 @@ +from __future__ import annotations + +import json +import re +from collections.abc import Callable, Sequence +from pathlib import PurePath +from typing import Annotated, Any, Generic, Literal, TypeVar + +from pydantic import ( + AfterValidator, + AliasChoices, + BaseModel, + BeforeValidator, + ConfigDict, + Field, + HttpUrl, + PlainSerializer, + model_serializer, + model_validator, +) + +from pdfrest.types.public import PdfRedactionPreset + +from ..types import PdfInfoQuery +from . import PdfRestFile +from .public import PdfRestFileID + + +def _ensure_list(value: Any) -> Any: + if value is None: + return None + if isinstance(value, list): + return value + if isinstance(value, Sequence) and not isinstance(value, (str, bytes, bytearray)): + return list(value) + return [value] + + +def _list_of_strings(value: list[Any]) -> list[str]: + return [str(e) for e in value] + + +def _validate_output_prefix(value: str | None) -> str | None: + """Validate output prefix to prevent directory traversal and reserved or unsafe names.""" + if value is None: + return None + if "/" in value or "\\" in value or ":" in value: + msg = "The output prefix must not contain a directory separator." + raise ValueError(msg) + if value.startswith("."): + msg = "The output prefix must not start with a `.`." 
+ raise ValueError(msg) + if ".." in value: + msg = "The output prefix must not contain `..`." + raise ValueError(msg) + basename = PurePath(value).name + if value != basename: + msg = "The output prefix must not include directory components." + raise ValueError(msg) + if basename in {"profile.json", "metadata.json"}: + msg = "The output prefix is a reserved name." + raise ValueError(msg) + special_chars_pattern = r"[`!@#$%^&*()+=\[\]{};':\"\\|,<>?~]" + matches = re.findall(special_chars_pattern, value) + if matches: + violations: list[str] = [] + for char in matches: + if char not in violations: + violations.append(char) + msg = ( + "The output prefix must not contain special characters: " + + ", ".join(repr(char) for char in violations) + + "." + ) + raise ValueError(msg) + return value + + +def _split_comma_list(value: Any) -> Any: + if isinstance(value, str): + return value.split(",") + if isinstance(value, list): + return value + if isinstance(value, Sequence) and not isinstance(value, (str, bytes, bytearray)): + return list(value) + msg = "Must be a comma separated string or a list of strings." + raise ValueError(msg) + + +def _split_comma_string(value: Any) -> list[Any] | None: + if value is None: + return None + if isinstance(value, str): + return value.split(",") + if isinstance(value, Sequence) and not isinstance(value, (bytes, bytearray, str)): + return list(value) + msg = "Must be a list, or a comma separated string." 
+ raise ValueError(msg) + + +def _pdfrest_file_to_id(value: Any) -> Any: + if isinstance(value, PdfRestFile): + return value.id + return value + + +def _serialize_as_first_file_id(value: list[PdfRestFile]) -> str: + return str(value[0].id) + + +def _serialize_as_comma_separated_string(value: list[Any] | None) -> str | None: + if value is None: + return None + return ",".join(str(element) for element in value) + + +def _serialize_page_ranges(value: list[str | int | tuple[str | int, ...]]) -> str: + def join_tuple(value: str | int | tuple[str | int, ...]) -> str: + if isinstance(value, tuple): + return "-".join(str(e) for e in value) + return str(value) + + return ",".join(join_tuple(v) for v in value) + + +def _serialize_grouped_page_ranges( + value: list[list[str | int | tuple[str | int, ...]]], +) -> list[str]: + return [_serialize_page_ranges(v) for v in value] + + +def _serialize_redactions(value: list[_PdfRedactionVariant]) -> str: + payload = [entry.model_dump(mode="json", exclude_none=True) for entry in value] + return json.dumps(payload, separators=(",", ":")) + + +def _allowed_mime_types( + allowed_mime_types: str, *more_allowed_mime_types: str, error_msg: str | None +) -> Callable[[Any], Any]: + combined_allowed_mime_types = [allowed_mime_types, *more_allowed_mime_types] + + def allowed_mime_types_validator( + value: PdfRestFile | list[PdfRestFile], + ) -> PdfRestFile | list[PdfRestFile]: + if isinstance(value, list): + for item in value: + allowed_mime_types_validator(item) + return value + if value.type not in combined_allowed_mime_types: + msg = error_msg or f"The file type must be one of: {allowed_mime_types}" + raise ValueError(msg) + return value + + return allowed_mime_types_validator + + +def _int_to_string(value: Any) -> Any: + if isinstance(value, int): + return str(value) + if isinstance(value, list): + return [_int_to_string(item) for item in value] + return value + + +class UploadURLs(BaseModel): + url: Annotated[ + list[HttpUrl] | HttpUrl, + 
class UploadURLs(BaseModel):
    """Payload for URL-based uploads; accepts one URL or a list of URLs."""

    # Validator order inside Annotated: the rightmost BeforeValidator runs first,
    # so inputs are first normalized to a list, then stringified element-wise.
    url: Annotated[
        list[HttpUrl] | HttpUrl,
        Field(min_length=1),
        BeforeValidator(_list_of_strings),
        BeforeValidator(_ensure_list),
    ]


# 1-based page number, serialized to pdfRest as a string.
PageNumber = Annotated[int, Field(ge=1), PlainSerializer(lambda x: str(x))]


def _split_page_range_tuple(x: str) -> tuple[str, str]:
    """Split "start-end" into its two halves (at the first hyphen only)."""
    start, end = x.split("-", maxsplit=1)
    return start, end


def _ascending_page_range(
    range: tuple[int, int | Literal["last"]],
) -> tuple[int, int | Literal["last"]]:
    """Reject ranges whose start page exceeds the (numeric) end page."""
    start, end = range
    if end != "last" and int(start) > int(end):
        msg = "The start page must be less than or equal to the end page."
        raise ValueError(msg)
    return range


# "start-end" tuples where either side may be the keyword "last".
_PageRangeTupleWithLast = Annotated[
    tuple[PageNumber, PageNumber]
    | tuple[Literal["last"], PageNumber]
    | tuple[PageNumber, Literal["last"]],
    BeforeValidator(_split_page_range_tuple),
]

# Page selections accepted by split/merge: keywords, single pages, or ranges.
SplitMergePageRange = (
    Literal["even", "odd", "last"] | PageNumber | _PageRangeTupleWithLast
)

# Like the above but the range must ascend and "last" is only valid as the end.
_AscendingPageRangeTuple = Annotated[
    tuple[PageNumber, PageNumber] | tuple[PageNumber, Literal["last"]],
    BeforeValidator(_split_page_range_tuple),
    AfterValidator(_ascending_page_range),
]

AscendingPageRange = PageNumber | Literal["last"] | _AscendingPageRangeTuple


class PdfInfoPayload(BaseModel):
    """Adapt caller options into a pdfRest-ready pdf-info request payload."""

    # Exactly one PDF; accepted under either the "file" or "files" key and
    # serialized as the bare file id under the "id" key.
    files: Annotated[
        list[PdfRestFile],
        Field(
            min_length=1,
            max_length=1,
            validation_alias=AliasChoices("file", "files"),
            serialization_alias="id",
        ),
        BeforeValidator(_ensure_list),
        AfterValidator(
            _allowed_mime_types("application/pdf", error_msg="Must be a PDF file")
        ),
        PlainSerializer(_serialize_as_first_file_id),
    ]
    # One or more query names, accepted as a list or a comma separated string.
    queries: Annotated[
        list[PdfInfoQuery],
        Field(min_length=1),
        BeforeValidator(_ensure_list),
        BeforeValidator(_split_comma_list),
        PlainSerializer(_serialize_as_comma_separated_string),
    ]


# A single 0-255 color channel.
RgbChannel = Annotated[int, Field(ge=0, le=255)]


class PdfLiteralRedactionModel(BaseModel):
    """Redaction matching a literal text string."""

    type: Literal["literal"]
    value: Annotated[str, Field(min_length=1)]


class PdfRegexRedactionModel(BaseModel):
    """Redaction matching a regular expression."""

    type: Literal["regex"]
    value: Annotated[str, Field(min_length=1)]


class PdfPresetRedactionModel(BaseModel):
    """Redaction using one of pdfRest's built-in presets."""

    type: Literal["preset"]
    value: PdfRedactionPreset


# Discriminated union over the "type" tag of the three redaction shapes.
_PdfRedactionVariant = Annotated[
    PdfLiteralRedactionModel | PdfRegexRedactionModel | PdfPresetRedactionModel,
    Field(discriminator="type"),
]


class PdfRedactionPreviewPayload(BaseModel):
    """Adapt caller options into a pdfRest-compatible redaction preview request."""

    files: Annotated[
        list[PdfRestFile],
        Field(
            min_length=1,
            max_length=1,
            validation_alias=AliasChoices("file", "files"),
            serialization_alias="id",
        ),
        BeforeValidator(_ensure_list),
        AfterValidator(
            _allowed_mime_types("application/pdf", error_msg="Must be a PDF file")
        ),
        PlainSerializer(_serialize_as_first_file_id),
    ]
    # Serialized as a compact JSON string, as the pdfRest form API expects.
    redactions: Annotated[
        list[_PdfRedactionVariant],
        Field(min_length=1),
        BeforeValidator(_ensure_list),
        PlainSerializer(_serialize_redactions),
    ]
    output: Annotated[
        str | None,
        Field(serialization_alias="output", min_length=1, default=None),
        AfterValidator(_validate_output_prefix),
    ] = None


class PdfRedactionApplyPayload(BaseModel):
    """Adapt caller options into a pdfRest-compatible redaction application request."""

    files: Annotated[
        list[PdfRestFile],
        Field(
            min_length=1,
            max_length=1,
            validation_alias=AliasChoices("file", "files"),
            serialization_alias="id",
        ),
        BeforeValidator(_ensure_list),
        AfterValidator(
            _allowed_mime_types("application/pdf", error_msg="Must be a PDF file")
        ),
        PlainSerializer(_serialize_as_first_file_id),
    ]
    # Accepted as a 3-tuple or a comma separated "r,g,b" string; serialized back
    # to the comma separated form.
    rgb_color: Annotated[
        tuple[RgbChannel, RgbChannel, RgbChannel] | None,
        Field(serialization_alias="rgb_color", default=None),
        BeforeValidator(_split_comma_string),
        PlainSerializer(_serialize_as_comma_separated_string),
    ] = None
    output: Annotated[
        str | None,
        Field(serialization_alias="output", min_length=1, default=None),
        AfterValidator(_validate_output_prefix),
    ] = None


# Parameterizes the graphic payloads over their endpoint-specific color models.
ColorModelT = TypeVar("ColorModelT", bound=str)


class BasePdfRestGraphicPayload(BaseModel, Generic[ColorModelT]):
    """Shared fields for the PNG/BMP/GIF/JPEG/TIFF conversion payloads."""

    files: Annotated[
        list[PdfRestFile],
        Field(
            min_length=1,
            max_length=1,
            validation_alias=AliasChoices("file", "files"),
            serialization_alias="id",
        ),
        AfterValidator(
            _allowed_mime_types("application/pdf", error_msg="Must be a PDF file")
        ),
        BeforeValidator(_ensure_list),
        PlainSerializer(_serialize_as_first_file_id),
    ]
    output_prefix: Annotated[
        str | None,
        Field(serialization_alias="output", min_length=1, default=None),
        AfterValidator(_validate_output_prefix),
    ]
    page_range: Annotated[
        list[AscendingPageRange] | None,
        Field(serialization_alias="pages", min_length=1, default=None),
        BeforeValidator(_ensure_list),
        BeforeValidator(_split_comma_list),
        BeforeValidator(_int_to_string),
        PlainSerializer(_serialize_page_ranges),
    ]
    resolution: Annotated[int, Field(ge=12, le=2400, default=300)]
    # Concrete subclasses pin ColorModelT to their endpoint's supported values.
    color_model: Annotated[ColorModelT, Field(default=...)]
    # NOTE(review): the default "none" is a bare string and pydantic does not
    # run validators on defaults, so a model dumped without an explicit
    # smoothing value would pass the string "none" to the comma-join serializer
    # (yielding "n,o,n,e") — confirm dumps use exclude_unset/exclude_defaults.
    smoothing: Annotated[
        list[Literal["none", "all", "text", "line", "image"]],
        Field(default="none"),
        BeforeValidator(_ensure_list),
        BeforeValidator(_split_comma_list),
        PlainSerializer(_serialize_as_comma_separated_string),
    ]


class PngPdfRestPayload(BasePdfRestGraphicPayload[Literal["rgb", "rgba", "gray"]]):
    """Adapt caller options into a pdfRest-ready PNG request payload."""

    color_model: Annotated[Literal["rgb", "rgba", "gray"], Field(default="rgb")]


# Default page selection covering the whole document.
_DEFAULT_FULL_DOCUMENT_RANGE: list[str] = ["1-last"]


class PdfSplitPayload(BaseModel):
    """Adapt caller options into a pdfRest-ready split request payload."""

    files: Annotated[
        list[PdfRestFile],
        Field(
            min_length=1,
            max_length=1,
            validation_alias=AliasChoices("file", "files"),
            serialization_alias="id",
        ),
        BeforeValidator(_ensure_list),
        AfterValidator(
            _allowed_mime_types("application/pdf", error_msg="Must be a PDF file")
        ),
        PlainSerializer(_serialize_as_first_file_id),
    ]
    # Outer validators normalize the whole collection; the inner Annotated
    # normalizes each group (comma string -> list of page selections).
    page_groups: Annotated[
        list[
            Annotated[
                list[SplitMergePageRange],
                BeforeValidator(_ensure_list),
                BeforeValidator(_split_comma_string),
            ]
        ]
        | None,
        Field(
            default=None,
            validation_alias=AliasChoices("pages", "page_groups"),
            serialization_alias="pages",
            min_length=1,
        ),
        BeforeValidator(_ensure_list),
        BeforeValidator(_int_to_string),
        PlainSerializer(_serialize_grouped_page_ranges),
    ]
    output_prefix: Annotated[
        str | None,
        Field(serialization_alias="output", min_length=1, default=None),
        AfterValidator(_validate_output_prefix),
    ] = None


class _PdfMergeItem(BaseModel):
    """One merge source: a PDF plus the pages to take from it."""

    file: Annotated[
        PdfRestFile,
        AfterValidator(
            _allowed_mime_types("application/pdf", error_msg="Must be a PDF file")
        ),
    ]
    # Defaults to the whole document; the factory copies so instances never
    # share the module-level default list.
    pages: Annotated[
        list[SplitMergePageRange],
        Field(
            min_length=1,
            default_factory=lambda: list(_DEFAULT_FULL_DOCUMENT_RANGE).copy(),
        ),
        BeforeValidator(_list_of_strings),
        BeforeValidator(_ensure_list),
        PlainSerializer(_serialize_page_ranges),
    ]

    @model_validator(mode="before")
    @classmethod
    def _transform_input(cls, data: Any) -> Any:
        # Accept (file, pages) tuples and bare PdfRestFile objects as sugar for
        # the canonical {"file": ..., "pages": ...} mapping.
        if isinstance(data, tuple):
            if len(data) != 2:
                msg = (
                    "Tuple merge entries must contain exactly two items: (file, pages)."
                )
                raise ValueError(msg)
            file_candidate, pages = data
            return {"file": file_candidate, "pages": pages}
        if isinstance(data, PdfRestFile):
            return {"file": data}
        return data


class PdfMergePayload(BaseModel):
    """Adapt caller options into a pdfRest-ready merge request payload."""

    sources: Annotated[
        list[_PdfMergeItem],
        Field(
            min_length=2,
            validation_alias=AliasChoices("sources", "documents", "files"),
        ),
        BeforeValidator(_ensure_list),
    ]
    output_prefix: Annotated[
        str | None,
        Field(serialization_alias="output", min_length=1, default=None),
        AfterValidator(_validate_output_prefix),
    ] = None

    @model_serializer(mode="wrap")
    def _serialize_pdf_merge_payload(
        self, handler: Callable[[PdfMergePayload], dict[str, Any]]
    ) -> dict[str, Any]:
        # Invoke all the serializers on the payload, which then properly serializes
        # all the fields.
        payload = handler(self)
        # Reorganize the serialized data into the parallel arrays that pdfRest expects
        payload["type"] = ["id"] * len(self.sources)
        payload["pages"] = [
            source.get("pages", _DEFAULT_FULL_DOCUMENT_RANGE[0])
            for source in payload["sources"]
        ]
        payload["id"] = [source["file"]["id"] for source in payload["sources"]]
        del payload["sources"]
        return payload


class BmpPdfRestPayload(BasePdfRestGraphicPayload[Literal["rgb", "gray"]]):
    """Adapt caller options into a pdfRest-ready BMP request payload."""

    color_model: Annotated[Literal["rgb", "gray"], Field(default="rgb")]


class GifPdfRestPayload(BasePdfRestGraphicPayload[Literal["rgb", "gray"]]):
    """Adapt caller options into a pdfRest-ready GIF request payload."""

    color_model: Annotated[Literal["rgb", "gray"], Field(default="rgb")]


class JpegPdfRestPayload(BasePdfRestGraphicPayload[Literal["rgb", "cmyk", "gray"]]):
    """Adapt caller options into a pdfRest-ready JPEG request payload."""

    color_model: Annotated[Literal["rgb", "cmyk", "gray"], Field(default="rgb")]
    jpeg_quality: Annotated[int, Field(ge=1, le=100, default=75)]


class TiffPdfRestPayload(
    BasePdfRestGraphicPayload[Literal["rgb", "rgba", "cmyk", "lab", "gray"]]
):
    """Adapt caller options into a pdfRest-ready TIFF request payload."""

    color_model: Annotated[
        Literal["rgb", "rgba", "cmyk", "lab", "gray"], Field(default="rgb")
    ]


class PdfRestRawUploadedFile(BaseModel):
    """The response sent by /upload is a list of these. /unzip returns files like this
    with outputUrl"""

    name: Annotated[str, Field(description="The name of the file")]
    id: Annotated[PdfRestFileID, Field(description="The id of the file")]
    # NOTE(review): _ensure_list wraps any non-None string into a list, which
    # cannot validate against `str | None` — confirm whether outputUrl should be
    # typed `list[str] | None` or the validator should be dropped.
    output_url: Annotated[
        str | None,
        Field(description="The url of the unzipped file", alias="outputUrl"),
        BeforeValidator(_ensure_list),
    ] = None


class PdfRestRawFileResponse(BaseModel):
    """The raw response from file-based pdfRest calls."""

    # Allow all extra fields to be stored and serialized
    # See: https://docs.pydantic.dev/latest/concepts/models/#extra-fields
    model_config = ConfigDict(extra="allow")

    input_id: Annotated[
        list[PdfRestFileID],
        Field(alias="inputId", description="The id of the input file"),
        BeforeValidator(_ensure_list),
    ]
    output_urls: Annotated[
        list[HttpUrl] | None,
        Field(alias="outputUrl", description="The url of the file"),
        BeforeValidator(_ensure_list),
    ] = None
    output_ids: Annotated[
        list[PdfRestFileID] | None,
        Field(alias="outputId", description="The id of the file"),
        BeforeValidator(_ensure_list),
    ] = None
    files: Annotated[
        list[PdfRestRawUploadedFile] | None,
        Field(description="The file(s) returned from the /unzip operation"),
        BeforeValidator(_ensure_list),
    ] = None
    warning: Annotated[
        str | None,
        Field(
            description="A warning that was generated during the pdfRest operation",
        ),
    ] = None

    @model_validator(mode="after")
    def _check_output_id_or_files(self) -> Any:
        # Every file-based response must identify its outputs one way or another.
        if self.output_ids is None and self.files is None:
            msg = "output_id or files must be specified"
            raise ValueError(msg)
        return self

    @property
    def ids(self) -> list[PdfRestFileID] | None:
        # Prefer explicit output ids; fall back to the ids of unzipped files.
        if self.output_ids is not None:
            return self.output_ids
        if self.files is not None:
            return [f.id for f in self.files]
        return None
specified" + raise ValueError(msg) + return self + + @property + def ids(self) -> list[PdfRestFileID] | None: + if self.output_ids is not None: + return self.output_ids + if self.files is not None: + return [f.id for f in self.files] + return None diff --git a/src/pdfrest/models/_validators.py b/src/pdfrest/models/_validators.py new file mode 100644 index 00000000..e69de29b diff --git a/src/pdfrest/models/public.py b/src/pdfrest/models/public.py new file mode 100644 index 00000000..d572f6d6 --- /dev/null +++ b/src/pdfrest/models/public.py @@ -0,0 +1,573 @@ +"""Pydantic models for pdfrest API payloads.""" + +from __future__ import annotations + +import re +import uuid as _uuid +from datetime import date +from typing import Annotated, Any, ClassVar + +from pydantic import ( + AliasChoices, + AwareDatetime, + BaseModel, + ConfigDict, + Field, + HttpUrl, +) +from pydantic_core import CoreSchema + +__all__ = ("PdfRestErrorResponse", "PdfRestFile", "PdfRestFileID", "UpResponse") + + +class PdfRestFileID(str): + """ + A str-like type representing: + [optional '1' or '2' prefix] + [UUIDv4 with hyphens] + + Examples: + - "de305d2-b6a0-4b5d-9a55-4e4e6d8c2d39" # no prefix + - "1de305d2-b6a0-4b5d-9a55-4e4e6d8c2d39" # prefix '1' + - "2DE305D2-B6A0-4B5D-9A55-4E4E6D8C2D39" # prefix '2' (upper-case input accepted) + + Canonical representation is lowercase. 
+ """ + + __slots__ = () + + # For Python validation (case-insensitive) + _PY_PATTERN: ClassVar[re.Pattern[str]] = re.compile( + r"^(?:[12])?(?:[0-9a-f]{8}-[0-9a-f]{4}-4[0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12})$", + re.IGNORECASE, + ) + + # For JSON Schema (no inline flags; must include both cases) + _PATTERN_STR: ClassVar[str] = ( + r"^(?:[12])?(?:[0-9A-Fa-f]{8}-[0-9A-Fa-f]{4}-4[0-9A-Fa-f]{3}-[89ABab][0-9A-Fa-f]{3}-[0-9A-Fa-f]{12})$" + ) + + def __new__(cls, value: str) -> PdfRestFileID: + if not isinstance(value, str): + msg = "PdfRestPrefixedUUID4 requires a str" + raise TypeError(msg) + if not cls._PY_PATTERN.fullmatch(value): + msg = ( + "Invalid PdfRestPrefixedUUID4. Expected: " + "optional '1' or '2' prefix + UUIDv4 with hyphens and RFC 4122 variant" + ) + raise ValueError(msg) + return str.__new__(cls, value.lower()) + + def __str__(self) -> str: + return str.__str__(self) + + def __repr__(self) -> str: + return f"{self.__class__.__name__}({super().__repr__()})" + + @property + def prefix(self) -> str | None: + """ + The leading prefix digit ('1' or '2') if present, else None. + + Note: Presence is unambiguous by length: + - 36 chars => no prefix + - 37 chars => prefix present + """ + return self[0] if len(self) == 37 else None + + @property + def uuid(self) -> str: + """The UUID part (without the optional prefix).""" + return self[1:] if len(self) == 37 else self + + @property + def uuid_obj(self) -> _uuid.UUID: + """The UUID object for the UUID part.""" + return _uuid.UUID(self.uuid) + + @classmethod + def is_valid(cls, value: str) -> bool: + """Quick validity check without constructing the object.""" + return isinstance(value, str) and bool(cls._PY_PATTERN.fullmatch(value)) + + @classmethod + def from_parts( + cls, u: str | _uuid.UUID, prefix: int | str | None = None + ) -> PdfRestFileID: + """ + Build from a UUIDv4 (str or uuid.UUID) and an optional prefix (1 or 2). + Raises ValueError if not a v4 UUID or bad prefix. 
+ """ + if isinstance(prefix, int): + prefix = str(prefix) # allow 1/2 as int + if prefix not in (None, "1", "2"): + msg = "prefix must be None, '1', or '2'" + raise ValueError(msg) + + if isinstance(u, _uuid.UUID): + if u.version != 4: + msg = "UUID must be version 4" + raise ValueError(msg) + u_text = str(u) + else: + u_text = str(u) + if not re.fullmatch( + r"[0-9A-Fa-f]{8}-[0-9A-Fa-f]{4}-4[0-9A-Fa-f]{3}-[89ABab][0-9A-Fa-f]{3}-[0-9A-Fa-f]{12}", + u_text, + ): + msg = "UUID text must be version 4 with RFC 4122 variant" + raise ValueError(msg) + + return cls((prefix or "") + u_text) + + @classmethod + def generate(cls, prefix: int | str | None = None) -> PdfRestFileID: + """Generate a new value with an optional prefix (1 or 2).""" + if isinstance(prefix, int): + prefix = str(prefix) + if prefix not in (None, "1", "2"): + msg = "prefix must be None, '1', or '2'" + raise ValueError(msg) + return cls.from_parts(_uuid.uuid4(), prefix=prefix) + + # ------------------------- + # Pydantic v2 integration + # ------------------------- + @classmethod + def __get_pydantic_core_schema__(cls, source_type: Any, handler: Any) -> CoreSchema: + """ + Build a Pydantic v2 core schema that accepts: + - a UUID (validated as v4) -> converted to this type (no prefix) + - a string matching our pattern + """ + from pydantic_core import core_schema + + str_schema = core_schema.str_schema(pattern=cls._PATTERN_STR) + uuid_schema = core_schema.uuid_schema(version=4) + union = core_schema.union_schema([uuid_schema, str_schema]) + + def to_class(v: Any) -> PdfRestFileID: + if isinstance(v, _uuid.UUID): + # UUID input: no prefix + return cls(str(v)) + # string already pattern-validated by inner schema + return cls(v) + + return core_schema.no_info_after_validator_function( + to_class, + union, + serialization=core_schema.to_string_ser_schema(), + ) + + @classmethod + def __get_pydantic_json_schema__(cls, core_schema: Any, handler: Any) -> dict: + """ + Provide a clean JSON Schema for 
OpenAPI/JSON Schema generators. + """ + # Prefer a single-string schema with pattern and examples + return { + "type": "string", + "title": cls.__name__, + "description": "UUIDv4 with hyphens, optionally prefixed by '1' or '2'.", + "pattern": cls._PATTERN_STR, + "examples": [ + "de305d2-b6a0-4b5d-9a55-4e4e6d8c2d39", + "1de305d2-b6a0-4b5d-9a55-4e4e6d8c2d39", + ], + } + + +class UpResponse(BaseModel): + """Response payload returned by the `/up` health endpoint.""" + + status: str + product: str + release_date: date = Field(alias="releaseDate") + version: str + + model_config = ConfigDict(frozen=True) + + +class PdfRestErrorResponse(BaseModel): + """Error response payloads from pdfRest.""" + + error: str | None = Field(alias="message") + model_config = ConfigDict(extra="allow", frozen=True) + + +class PdfRestFile(BaseModel): + """Represents a file on the pdfRest server.""" + + id: PdfRestFileID = Field( + min_length=1, + description="Identifier of the file on the pdfRest server", + ) + name: str = Field( + min_length=1, + description="Name of the file", + ) + url: HttpUrl = Field( + description="URL from which the file can be downloaded", + ) + type: str = Field( + min_length=1, + description="MIME type of the file", + ) + size: int = Field( + description="Size of the file", + ) + modified: AwareDatetime = Field( + description="The last modified time of the file, which must include time zone " + "info.", + ) + scheduled_deletion_time_utc: AwareDatetime | None = Field( + alias="scheduledDeletionTimeUtc", + default=None, + description="The UTC time at which the file will be deleted from the server.", + ) + + model_config = ConfigDict(frozen=True) + + +class PdfRestFileBasedResponse(BaseModel): + """ + Represents a response from a pdfRest API operation that is file-based, allowing + handling of input and output files along with additional warnings. 
+ """ + + # Allow all extra fields to be stored and serialized + # See: https://docs.pydantic.dev/latest/concepts/models/#extra-fields + model_config = ConfigDict(extra="allow") + + input_ids: Annotated[ + list[PdfRestFileID], + Field( + description="The ids of the files that were input to the pdfRest operation", + min_length=1, + validation_alias=AliasChoices("input_id", "inputId"), + ), + ] + + # Optional because some endpoints may not make output + output_files: Annotated[ + list[PdfRestFile], + Field( + description="The list of files returned by the pdfRest operation", + min_length=1, + validation_alias=AliasChoices("output_file", "outputFile"), + ), + ] + + warning: Annotated[ + str | None, + Field( + description="A warning that was generated during the pdfRest operation", + ), + ] = None + + @property + def input_id(self) -> PdfRestFileID: + if len(self.input_ids) == 1: + return self.input_ids[0] + if len(self.input_ids) == 0: + msg = "no input id was specified" + else: + msg = "multiple input ids were specified" + raise ValueError(msg) + + @property + def output_file(self) -> PdfRestFile: + if len(self.output_files) == 1: + return self.output_files[0] + if len(self.output_files) == 0: + msg = "no output file was returned by the pdfRest operation" + else: + msg = "multiple output files were returned by the pdfRest operation" + raise ValueError(msg) + + +class PdfRestInfoResponse(BaseModel): + """A response containing the output from the /info route.""" + + # Allow all extra fields to be stored and serialized + # See: https://docs.pydantic.dev/latest/concepts/models/#extra-fields + model_config = ConfigDict(extra="allow") + + input_id: Annotated[ + PdfRestFileID, + Field( + validation_alias=AliasChoices("input_id", "inputId"), + description="The id of the input file", + ), + ] + tagged: Annotated[ + bool | None, + Field( + description="Indicates whether structure tags are present in the PDF " + "document. The result is true or false." 
+ ), + ] = None + image_only: Annotated[ + bool | None, + Field( + description=( + "Indicates whether the document is 'image only,' meaning it consists " + "solely of embedded graphical images with no text or other standard " + "PDF document features except for metadata. The result is true or " + "false." + ) + ), + ] = None + title: Annotated[ + str | None, + Field( + description=( + "The title of the PDF as retrieved from the metadata. The result is a " + "string that may be empty if the document does not have a title." + ) + ), + ] = None + subject: Annotated[ + str | None, + Field( + description=( + "The subject of the PDF as retrieved from the metadata. The result is " + "a string that may be empty if the document does not have a subject." + ) + ), + ] = None + author: Annotated[ + str | None, + Field( + description=( + "The author of the PDF as retrieved from the metadata. The result is " + "a string that may be empty if the document does not have an author." + ) + ), + ] = None + producer: Annotated[ + str | None, + Field( + description=( + "The producer of the PDF as retrieved from the metadata. The result " + "is a string that may be empty if the document does not have a " + "producer." + ) + ), + ] = None + creator: Annotated[ + str | None, + Field( + description=( + "The creator of the PDF as retrieved from the metadata. The result is " + "a string that may be empty if the document does not have a creator." + ) + ), + ] = None + creation_date: Annotated[ + str | None, + Field( + description=( + "The creation date of the PDF as retrieved from the metadata. The " + "result is a string that may be empty if the document does not " + "have a creation date." + ) + ), + ] = None + modified_date: Annotated[ + str | None, + Field( + description=( + "The most recent modification date of the PDF as retrieved from the " + "metadata. The result is a string that may be empty if the document " + "does not have a modification date." 
+ ) + ), + ] = None + keywords: Annotated[ + str | None, + Field( + description=( + "The keywords of the PDF as retrieved from the metadata. The result " + "is a string that may be empty if the document does not include " + "keywords." + ) + ), + ] = None + custom_metadata: Annotated[ + dict[str, Any] | None, + Field( + description=( + "Custom metadata entries extracted from the PDF. The result is a " + "dictionary mapping keys to their stored values, or None when no " + "custom metadata exists." + ) + ), + ] = None + doc_language: Annotated[ + str | None, + Field( + description="The language of the document as declared in its metadata. " + "The result is a string." + ), + ] = None + page_count: Annotated[ + int | None, + Field( + description="The number of pages in the PDF document. The result is an " + "integer." + ), + ] = None + contains_annotations: Annotated[ + bool | None, + Field( + description=( + "Indicates whether the PDF document contains annotations such as " + "notes, highlighted text, file attachments, crossed-out text, or text " + "callout boxes. The result is true or false." + ) + ), + ] = None + contains_signature: Annotated[ + bool | None, + Field( + description="Indicates whether the PDF contains any digital signatures. " + "The result is true or false." + ), + ] = None + pdf_version: Annotated[ + str | None, + Field( + description=( + "The version of the PDF standard used to create the document. The " + "result is a string in the format X.Y.Z, where X, Y, and Z represent " + "the major, minor, and extension versions." + ) + ), + ] = None + file_size: Annotated[ + int | None, + Field( + description="The size of the PDF file in bytes. The result is an integer." + ), + ] = None + filename: Annotated[ + str | None, + Field(description="The name of the PDF file. 
The result is a string."), + ] = None + restrict_permissions_set: Annotated[ + bool | None, + Field( + description=( + "Indicates whether the PDF file has restricted permissions, such as " + "preventing printing, copying, or signing. The result is true or " + "false." + ) + ), + ] = None + contains_xfa: Annotated[ + bool | None, + Field( + description="Indicates whether the PDF contains XFA forms. The result is " + "true or false." + ), + ] = None + contains_acroforms: Annotated[ + bool | None, + Field( + description="Indicates whether the PDF contains Acroforms. The result is " + "true or false." + ), + ] = None + contains_javascript: Annotated[ + bool | None, + Field( + description="Indicates whether the PDF contains JavaScript. The result is " + "true or false." + ), + ] = None + contains_transparency: Annotated[ + bool | None, + Field( + description="Indicates whether the PDF contains transparent objects. The " + "result is true or false." + ), + ] = None + contains_embedded_file: Annotated[ + bool | None, + Field( + description="Indicates whether the PDF contains one or more embedded " + "files. The result is true or false." + ), + ] = None + uses_embedded_fonts: Annotated[ + bool | None, + Field( + description="Indicates whether the PDF contains fully embedded fonts. " + "The result is true or false." + ), + ] = None + uses_nonembedded_fonts: Annotated[ + bool | None, + Field( + description="Indicates whether the PDF contains non-embedded fonts. The " + "result is true or false." + ), + ] = None + pdfa: Annotated[ + bool | None, + Field( + description="Indicates whether the document conforms to the PDF/A " + "standard. The result is true or false." + ), + ] = None + pdfua_claim: Annotated[ + bool | None, + Field( + description="Indicates whether the document claims to conform to the " + "PDF/UA standard. The result is true or false." 
+ ), + ] = None + pdfe_claim: Annotated[ + bool | None, + Field( + description="Indicates whether the document claims to conform to the " + "PDF/E standard. The result is true or false." + ), + ] = None + pdfx_claim: Annotated[ + bool | None, + Field( + description="Indicates whether the document claims to conform to the " + "PDF/X standard. The result is true or false." + ), + ] = None + requires_password_to_open: Annotated[ + bool | None, + Field( + description=( + "Indicates whether the PDF requires a password to open. The result " + "is true or false. *Note*: A document requiring a password cannot be " + "opened by this route and will not provide much other information." + ) + ), + ] = None + all_queries_processed: Annotated[ + bool, + Field( + validation_alias=AliasChoices( + "all_queries_processed", "allQueriesProcessed" + ), + description=( + "Indicates whether all possible queries about the PDF document were " + "successfully processed. This field is required, and the result is " + "true or false." 
"""Public import surface for shared pdfrest types."""

# Re-export the shared type definitions so callers can write
# ``from pdfrest.types import PdfInfoQuery`` without knowing the submodule.
from .public import (
    ALL_PDF_INFO_QUERIES,
    PdfInfoQuery,
    PdfMergeInput,
    PdfMergeSource,
    PdfPageSelection,
    PdfRedactionInstruction,
    PdfRedactionPreset,
    PdfRedactionType,
    PdfRGBColor,
)

# NOTE(review): keep this list in sync with ``pdfrest.types.public.__all__``.
__all__ = [
    "ALL_PDF_INFO_QUERIES",
    "PdfInfoQuery",
    "PdfMergeInput",
    "PdfMergeSource",
    "PdfPageSelection",
    "PdfRGBColor",
    "PdfRedactionInstruction",
    "PdfRedactionPreset",
    "PdfRedactionType",
]
"pdf_version", + "file_size", + "filename", + "restrict_permissions_set", + "contains_xfa", + "contains_acroforms", + "contains_javascript", + "contains_transparency", + "contains_embedded_file", + "uses_embedded_fonts", + "uses_nonembedded_fonts", + "pdfa", + "pdfua_claim", + "pdfe_claim", + "pdfx_claim", + "requires_password_to_open", +] + +ALL_PDF_INFO_QUERIES: tuple[PdfInfoQuery, ...] = cast( + tuple[PdfInfoQuery, ...], get_args(PdfInfoQuery) +) + +PdfRedactionType = Literal["literal", "regex", "preset"] + +PdfRedactionPreset = Literal[ + "email", + "phone_number", + "date", + "us_ssn", + "url", + "credit_card", + "credit_debit_pin", + "bank_routing_number", + "international_bank_account_number", + "swift_bic_number", + "ipv4", + "ipv6", +] + + +class PdfRedactionInstruction(TypedDict): + type: PdfRedactionType + value: PdfRedactionPreset | str + + +PdfRGBColor = tuple[int, int, int] + +PdfPageSelection = str | int | Sequence[str | int] + + +class PdfMergeSource(TypedDict, total=False): + file: Required[PdfRestFile] + pages: PdfPageSelection | None + + +PdfMergeInput = PdfRestFile | PdfMergeSource | tuple[PdfRestFile, PdfPageSelection] diff --git a/tests/__init__.py b/tests/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/conftest.py b/tests/conftest.py new file mode 100644 index 00000000..50fb297f --- /dev/null +++ b/tests/conftest.py @@ -0,0 +1,35 @@ +from __future__ import annotations + +import os + +import httpx +import pytest + +LIVE_BASE_URL_CANDIDATES: tuple[str, ...] 
def build_file_info_payload(file_id: str, name: str, mime_type: str) -> dict[str, Any]:
    """Return a file-info dict shaped like a pdfRest file resource.

    The ``modified`` timestamp is a fixed UTC instant (2024-01-01T00:00:00Z)
    so payloads are deterministic across test runs.
    """
    fixed_instant = datetime(2024, 1, 1, tzinfo=timezone.utc)
    # Normalize "+00:00" to the "Z" suffix the API uses.
    modified_text = fixed_instant.isoformat().replace("+00:00", "Z")
    return {
        "id": file_id,
        "name": name,
        "url": f"https://api.pdfrest.com/resource/{file_id}",
        "type": mime_type,
        "size": 256,
        "modified": modified_text,
        "scheduledDeletionTimeUtc": None,
    }
"scheduledDeletionTimeUtc": None, + } + ) + + +def assert_conversion_payload( + payload: dict[str, Any], + expected: dict[str, Any], + *, + allowed_extras: Iterable[str] | None = None, +) -> None: + for key, value in expected.items(): + assert payload[key] == value + extra_keys = set(payload) - set(expected) + permitted = {"color_model", "resolution"} + if allowed_extras is not None: + permitted.update(allowed_extras) + assert extra_keys <= permitted + if "resolution" not in expected and "resolution" in payload: + assert payload["resolution"] == 300 + if "color_model" not in expected and "color_model" in payload: + assert payload["color_model"] == "rgb" diff --git a/tests/live/__init__.py b/tests/live/__init__.py new file mode 100644 index 00000000..18b37dd9 --- /dev/null +++ b/tests/live/__init__.py @@ -0,0 +1 @@ +# Package for live integration tests. diff --git a/tests/live/test_live_graphic_conversions.py b/tests/live/test_live_graphic_conversions.py new file mode 100644 index 00000000..2b68edb3 --- /dev/null +++ b/tests/live/test_live_graphic_conversions.py @@ -0,0 +1,396 @@ +from __future__ import annotations + +from collections.abc import Iterable, Sequence +from typing import Any, NamedTuple, get_args + +import pytest + +from pdfrest import PdfRestApiError, PdfRestClient +from pdfrest.models import PdfRestFile +from pdfrest.models._internal import ( + BasePdfRestGraphicPayload, + BmpPdfRestPayload, + GifPdfRestPayload, + JpegPdfRestPayload, + PngPdfRestPayload, + TiffPdfRestPayload, +) + +from ..resources import get_test_resource_path + + +class _GraphicEndpointSpec(NamedTuple): + method_name: str + payload_model: type[BasePdfRestGraphicPayload[Any]] + + +PNG_PAYLOAD_ONLY: dict[str, _GraphicEndpointSpec] = { + "png": _GraphicEndpointSpec("convert_to_png", PngPdfRestPayload), +} + +PAYLOAD_MODELS: dict[str, _GraphicEndpointSpec] = { + **PNG_PAYLOAD_ONLY, + "bmp": _GraphicEndpointSpec("convert_to_bmp", BmpPdfRestPayload), + "gif": 
def _enumerate_color_models(
    payload_model: type[BasePdfRestGraphicPayload[Any]],
) -> Iterable[str]:
    """Return the Literal color-model choices declared on the payload model."""
    annotation = payload_model.model_fields["color_model"].annotation
    return get_args(annotation) or ()


def _resolution_bounds(
    payload_model: type[BasePdfRestGraphicPayload[Any]],
) -> tuple[int, int]:
    """Return (min, max) resolution from the field's ge/le metadata.

    Falls back to (12, 2400) when the metadata entries are absent.
    """
    metadata = payload_model.model_fields["resolution"].metadata
    # NOTE(review): assumes metadata[0] carries ``ge`` and metadata[1]
    # carries ``le`` — matches how the payload models declare the field.
    lower = metadata[0].ge if metadata else 12
    upper = metadata[1].le if len(metadata) > 1 else 2400
    return int(lower), int(upper)


def _valid_color_cases() -> list[Any]:
    """One pytest param per (endpoint, declared color model) pair."""
    params: list[Any] = []
    for label, spec in PAYLOAD_MODELS.items():
        for model_name in _enumerate_color_models(spec.payload_model):
            params.append(
                pytest.param(label, spec, model_name, id=f"{label}-{model_name}")
            )
    return params


def _invalid_color_cases() -> list[Any]:
    """One pytest param per (endpoint, rejected color model) pair."""
    params: list[Any] = []
    candidates = ("lab", "rgba", "cmyk", "xyz", "ultraviolet", "infrared-spectrum")
    for label, spec in PAYLOAD_MODELS.items():
        allowed = set(_enumerate_color_models(spec.payload_model))
        emitted: set[str] = set()
        for value in (*candidates, "not-a-color-model"):
            # Skip values the endpoint actually accepts, and de-duplicate.
            if value in allowed or value in emitted:
                continue
            emitted.add(value)
            params.append(pytest.param(label, spec, value, id=f"{label}-{value}"))
    return params


# Smoothing values the graphics endpoints accept.
_SMOOTHING_VALUES: tuple[str, ...] = ("none", "all", "text", "line", "image")


def _valid_smoothing_cases() -> list[Any]:
    """One pytest param per (endpoint, smoothing value) pair."""
    params: list[Any] = []
    for label, spec in PAYLOAD_MODELS.items():
        for smoothing in _SMOOTHING_VALUES:
            params.append(
                pytest.param(label, spec, smoothing, id=f"{label}-{smoothing}")
            )
    return params
@pytest.fixture(scope="module")
def uploaded_20_page_pdf(
    pdfrest_api_key: str,
    pdfrest_live_base_url: str,
) -> PdfRestFile:
    """Upload the 20-page sample once per module and share it across tests."""
    resource = get_test_resource_path("20-pages.pdf")
    with PdfRestClient(
        api_key=pdfrest_api_key,
        base_url=pdfrest_live_base_url,
    ) as client:
        return client.files.create_from_paths([resource])[0]


@pytest.mark.parametrize(
    ("_endpoint_label", "spec", "color_model"),
    _valid_color_cases(),
)
def test_live_graphic_valid_color_models(
    pdfrest_api_key: str,
    pdfrest_live_base_url: str,
    _endpoint_label: str,
    spec: _GraphicEndpointSpec,
    color_model: str,
) -> None:
    """Every color model declared on a payload model is accepted live."""
    source_pdf = get_test_resource_path("report.pdf")
    # Use the endpoint's minimum resolution to keep the conversion cheap.
    minimum_resolution = _resolution_bounds(spec.payload_model)[0]
    with PdfRestClient(
        api_key=pdfrest_api_key, base_url=pdfrest_live_base_url
    ) as client:
        uploaded = client.files.create_from_paths([source_pdf])[0]
        convert = getattr(client, spec.method_name)
        result = convert(
            uploaded,
            color_model=color_model,
            resolution=minimum_resolution,
        )
        assert result.output_files
@pytest.mark.parametrize(
    ("_endpoint_label", "spec"),
    PNG_PAYLOAD_ONLY.items(),
    ids=list(PNG_PAYLOAD_ONLY),
)
@pytest.mark.parametrize(
    ("bound", "offset", "should_raise"),
    [
        pytest.param("min", 0, False, id="min"),
        pytest.param("max", 0, False, id="max"),
        pytest.param("min", -1, True, id="below-min"),
        pytest.param("max", 1, True, id="above-max"),
    ],
)
def test_live_graphic_resolution_bounds(
    pdfrest_api_key: str,
    pdfrest_live_base_url: str,
    _endpoint_label: str,
    spec: _GraphicEndpointSpec,
    bound: str,
    offset: int,
    should_raise: bool,
) -> None:
    """Resolutions at the bounds succeed; one step past either bound fails."""
    lower, upper = _resolution_bounds(spec.payload_model)
    anchor = lower if bound == "min" else upper
    source_pdf = get_test_resource_path("report.pdf")

    with PdfRestClient(
        api_key=pdfrest_api_key, base_url=pdfrest_live_base_url
    ) as client:
        uploaded = client.files.create_from_paths([source_pdf])[0]
        convert = getattr(client, spec.method_name)
        kwargs: dict[str, Any] = {"resolution": anchor}

        if should_raise:
            # Out-of-range values bypass client-side validation via
            # extra_body so that the server does the rejecting.
            kwargs["extra_body"] = {"resolution": anchor + offset}
            with pytest.raises(PdfRestApiError):
                convert(uploaded, **kwargs)
        else:
            result = convert(uploaded, **kwargs)
            assert result.output_files
@pytest.mark.parametrize(
    ("_endpoint_label", "spec", "invalid_smoothing"),
    _invalid_smoothing_cases(),
)
def test_live_graphic_invalid_smoothing(
    pdfrest_api_key: str,
    pdfrest_live_base_url: str,
    _endpoint_label: str,
    spec: _GraphicEndpointSpec,
    invalid_smoothing: Any,
) -> None:
    """Smoothing values outside the accepted set are rejected by the server."""
    source_pdf = get_test_resource_path("report.pdf")
    with PdfRestClient(
        api_key=pdfrest_api_key, base_url=pdfrest_live_base_url
    ) as client:
        uploaded = client.files.create_from_paths([source_pdf])[0]
        convert = getattr(client, spec.method_name)
        # A valid smoothing kwarg satisfies client validation; extra_body
        # smuggles the invalid value through to the server.
        with pytest.raises(PdfRestApiError):
            convert(
                uploaded,
                smoothing="none",
                extra_body={"smoothing": invalid_smoothing},
            )
_expand_page_selection(page_range, total_pages=20) + assert len(response.output_files) == len(expected_pages) + assert any( + file_info.name.endswith(".png") for file_info in response.output_files + ) + assert all( + file_info.type == "image/png" and file_info.size > 0 + for file_info in response.output_files + ) + assert str(response.input_id) == str(uploaded_20_page_pdf.id) + else: + with pytest.raises(PdfRestApiError): + client.convert_to_png( + uploaded_20_page_pdf, + output_prefix=f"live-range-{case_id}", + extra_body={"page_range": page_range}, + ) + + +@pytest.mark.parametrize( + "page_override", + [ + pytest.param("0", id="zero"), + pytest.param("last-0", id="range-with-zero"), + pytest.param("7-3", id="descending-range"), + pytest.param("even", id="even"), + pytest.param("odd", id="odd"), + pytest.param("odd,even", id="odd-even"), + ], +) +def test_live_png_page_range_invalid_overrides( + pdfrest_api_key: str, + pdfrest_live_base_url: str, + uploaded_20_page_pdf: PdfRestFile, + page_override: str, + request: pytest.FixtureRequest, +) -> None: + case_id = request.node.callspec.id + with ( + PdfRestClient( + api_key=pdfrest_api_key, + base_url=pdfrest_live_base_url, + ) as client, + pytest.raises(PdfRestApiError), + ): + client.convert_to_png( + uploaded_20_page_pdf, + output_prefix=f"live-range-invalid-{case_id}", + page_range="1", + extra_body={"pages": page_override}, + ) + + +def _expand_page_selection( + selection: Any, + *, + total_pages: int, +) -> list[int]: + def expand_entry(entry: Any) -> list[int]: + if isinstance(entry, int): + return [entry] + text = str(entry).strip() + lowered = text.lower() + if lowered == "even": + return list(range(2, total_pages + 1, 2)) + if lowered == "odd": + return list(range(1, total_pages + 1, 2)) + if lowered == "last": + return [total_pages] + if "-" in lowered: + start_raw, end_raw = (part.strip() for part in lowered.split("-", 1)) + + def resolve(range_token: str) -> int: + return total_pages if range_token == 
"last" else int(range_token) # noqa: S105 + + start = resolve(start_raw) + end = resolve(end_raw) + step = 1 if end >= start else -1 + return list(range(start, end + step, step)) + return [int(text)] + + if isinstance(selection, Sequence) and not isinstance( + selection, (str, bytes, bytearray) + ): + expanded: list[int] = [] + for segment in selection: + expanded.extend(expand_entry(segment)) + return expanded + return expand_entry(selection) diff --git a/tests/live/test_live_pdf_info.py b/tests/live/test_live_pdf_info.py new file mode 100644 index 00000000..977fe87d --- /dev/null +++ b/tests/live/test_live_pdf_info.py @@ -0,0 +1,164 @@ +from __future__ import annotations + +from typing import Any, cast, get_args + +import pytest + +from pdfrest import AsyncPdfRestClient, PdfRestApiError, PdfRestClient +from pdfrest.models import PdfRestFile, PdfRestInfoResponse +from pdfrest.models._internal import PdfInfoPayload +from pdfrest.types import ALL_PDF_INFO_QUERIES, PdfInfoQuery + +from ..resources import get_test_resource_path + + +def _allowed_queries() -> tuple[PdfInfoQuery, ...]: + field = PdfInfoPayload.model_fields["queries"] + (item_type,) = get_args(field.annotation) + return cast(tuple[PdfInfoQuery, ...], tuple(get_args(item_type))) + + +ALLOWED_QUERIES: tuple[PdfInfoQuery, ...] 
# Ground-truth answers for every /info query when run against report.pdf.
EXPECTED_VALUES: dict[PdfInfoQuery, Any] = {
    # Structure / accessibility flags.
    "tagged": False,
    "image_only": False,
    # Metadata strings; empty when the document omits them.
    "title": "",
    "subject": "",
    "author": "",
    "producer": "",
    "creator": "",
    "creation_date": "",
    "modified_date": "",
    "keywords": "",
    "custom_metadata": {},
    "doc_language": "en-US",
    # Document facts.
    "page_count": 1,
    "contains_annotations": False,
    "contains_signature": False,
    "pdf_version": "1.7.0",
    "file_size": 25588,
    "filename": "report.pdf",
    "restrict_permissions_set": False,
    # Feature flags.
    "contains_xfa": False,
    "contains_acroforms": False,
    "contains_javascript": False,
    "contains_transparency": False,
    "contains_embedded_file": False,
    "uses_embedded_fonts": False,
    "uses_nonembedded_fonts": False,
    # Standards conformance claims.
    "pdfa": False,
    "pdfua_claim": False,
    "pdfe_claim": False,
    "pdfx_claim": False,
    "requires_password_to_open": False,
}


def _assert_expected_value(query: PdfInfoQuery, value: Any) -> None:
    """Fail unless ``value`` matches the ground truth for ``query``."""
    assert value == EXPECTED_VALUES[query]
@pytest.mark.parametrize(
    "invalid_query",
    [
        pytest.param("invalid_query", id="invalid-query"),
        pytest.param("tagged,!!invalid!!", id="mixed-invalid"),
        pytest.param("🚫", id="emoji"),
    ],
)
def test_live_pdf_info_invalid_query(
    pdfrest_api_key: str,
    pdfrest_live_base_url: str,
    uploaded_pdf: PdfRestFile,
    invalid_query: str,
) -> None:
    """Unknown query strings smuggled via extra_body are rejected."""
    with (
        PdfRestClient(
            api_key=pdfrest_api_key, base_url=pdfrest_live_base_url
        ) as client,
        pytest.raises(PdfRestApiError),
    ):
        client.query_pdf_info(
            uploaded_pdf,
            queries="tagged",
            extra_body={"queries": invalid_query},
        )


@pytest.mark.parametrize(
    "query_group",
    [
        pytest.param(("tagged", "filename"), id="two-values"),
        pytest.param(("page_count", "file_size", "pdf_version"), id="three-values"),
    ],
)
def test_live_pdf_info_multiple_queries(
    pdfrest_api_key: str,
    pdfrest_live_base_url: str,
    uploaded_pdf: PdfRestFile,
    query_group: tuple[PdfInfoQuery, ...],
) -> None:
    """Several queries in a single call each return their expected value."""
    with PdfRestClient(
        api_key=pdfrest_api_key, base_url=pdfrest_live_base_url
    ) as client:
        info = client.query_pdf_info(uploaded_pdf, queries=query_group)

    assert isinstance(info, PdfRestInfoResponse)
    assert str(info.input_id) == str(uploaded_pdf.id)
    assert info.all_queries_processed is True
    for name in query_group:
        _assert_expected_value(name, getattr(info, name))
@pytest.fixture(scope="module")
def uploaded_pdf_for_redaction(
    pdfrest_api_key: str,
    pdfrest_live_base_url: str,
) -> PdfRestFile:
    """Upload the redaction sample once per module and share it."""
    resource = get_test_resource_path("redactable-text.pdf")
    with PdfRestClient(
        api_key=pdfrest_api_key,
        base_url=pdfrest_live_base_url,
    ) as client:
        return client.files.create_from_paths([resource])[0]


@pytest.mark.parametrize(
    "instruction",
    [
        pytest.param(
            {
                "type": "literal",
                "value": "The quick brown fox jumped over the lazy dog.",
            },
            id="literal",
        ),
        pytest.param({"type": "regex", "value": r"\b\d{3}-\d{2}-\d{4}\b"}, id="regex"),
        *[
            pytest.param({"type": "preset", "value": preset}, id=f"preset-{preset}")
            for preset in get_args(PdfRedactionPreset)
        ],
    ],
)
def test_live_redaction_preview_and_apply_single(
    pdfrest_api_key: str,
    pdfrest_live_base_url: str,
    uploaded_pdf_for_redaction: PdfRestFile,
    instruction: PdfRedactionInstruction,
) -> None:
    """Preview then apply a single redaction instruction end to end."""
    with PdfRestClient(
        api_key=pdfrest_api_key,
        base_url=pdfrest_live_base_url,
    ) as client:
        previewed = client.preview_redactions(
            uploaded_pdf_for_redaction,
            redactions=[instruction],
            output="redaction-preview",
        )
        assert previewed.output_files
        marked = previewed.output_files[0]
        assert marked.name.endswith("redaction-preview.pdf")
        assert marked.type == "application/pdf"

        finalized = client.apply_redactions(
            marked,
            output="redaction-final",
        )
        assert finalized.output_files
        burned = finalized.output_files[0]
        assert burned.name.endswith("redaction-final.pdf")
        assert burned.type == "application/pdf"
+ final_file = applied.output_files[0] + assert final_file.name.endswith("redaction-final.pdf") + assert final_file.type == "application/pdf" + + +@pytest.mark.parametrize( + "instructions", + [ + pytest.param( + [ + { + "type": "literal", + "value": "The quick brown fox jumped over the lazy dog.", + }, + {"type": "regex", "value": r"\b\d{3}-\d{2}-\d{4}\b"}, + ], + id="literal-and-regex", + ), + pytest.param( + [ + {"type": "preset", "value": "email"}, + {"type": "preset", "value": "phone_number"}, + ], + id="preset-email-and-phone", + ), + pytest.param( + [ + {"type": "preset", "value": "credit_card"}, + {"type": "preset", "value": "bank_routing_number"}, + {"type": "preset", "value": "swift_bic_number"}, + ], + id="multiple-presets", + ), + ], +) +def test_live_redaction_preview_and_apply_multiple( + pdfrest_api_key: str, + pdfrest_live_base_url: str, + uploaded_pdf_for_redaction: PdfRestFile, + instructions: list[PdfRedactionInstruction], +) -> None: + with PdfRestClient( + api_key=pdfrest_api_key, + base_url=pdfrest_live_base_url, + ) as client: + preview = client.preview_redactions( + uploaded_pdf_for_redaction, + redactions=instructions, + output="redaction-preview-multi", + ) + + assert preview.output_files + preview_file = preview.output_files[0] + assert preview_file.name.endswith("redaction-preview-multi.pdf") + assert preview_file.type == "application/pdf" + + applied = client.apply_redactions( + preview_file, + output="redaction-final-multi", + ) + + assert applied.output_files + final_file = applied.output_files[0] + assert final_file.name.endswith("redaction-final-multi.pdf") + assert final_file.type == "application/pdf" + + +@pytest.mark.parametrize( + "extra_body", + [ + pytest.param({"redactions": "invalid"}, id="invalid-redactions"), + pytest.param({"rgb_color": "-1,-1,-1"}, id="invalid-rgb"), + ], +) +def test_live_redactions_invalid_payloads( + pdfrest_api_key: str, + pdfrest_live_base_url: str, + uploaded_pdf_for_redaction: PdfRestFile, + 
extra_body: dict[str, object], +) -> None: + with PdfRestClient( + api_key=pdfrest_api_key, + base_url=pdfrest_live_base_url, + ) as client: + if "redactions" in extra_body: + with pytest.raises(PdfRestApiError): + client.preview_redactions( + uploaded_pdf_for_redaction, + redactions=[{"type": "literal", "value": "placeholder"}], + extra_body=extra_body, + ) + else: + preview = client.preview_redactions( + uploaded_pdf_for_redaction, + redactions=[{"type": "literal", "value": "placeholder"}], + ) + preview_file = preview.output_files[0] + with pytest.raises(PdfRestApiError): + client.apply_redactions(preview_file, extra_body=extra_body) diff --git a/tests/live/test_live_pdf_split_merge.py b/tests/live/test_live_pdf_split_merge.py new file mode 100644 index 00000000..351be410 --- /dev/null +++ b/tests/live/test_live_pdf_split_merge.py @@ -0,0 +1,457 @@ +from __future__ import annotations + +from collections.abc import Sequence + +import pytest + +from pdfrest import AsyncPdfRestClient, PdfRestApiError, PdfRestClient +from pdfrest.models import PdfRestFile +from pdfrest.types import PdfMergeInput, PdfPageSelection + +from ..resources import get_test_resource_path + + +def _expand_page_selection( + selection: PdfPageSelection | Sequence[PdfPageSelection], + *, + total_pages: int, +) -> list[int]: + def expand_entry(entry: PdfPageSelection) -> list[int]: + if isinstance(entry, int): + return [entry] + text = str(entry).strip() + lowered = text.lower() + if lowered == "even": + return list(range(2, total_pages + 1, 2)) + if lowered == "odd": + return list(range(1, total_pages + 1, 2)) + if lowered == "last": + return [total_pages] + if "-" in lowered: + start_raw, end_raw = (part.strip() for part in lowered.split("-", 1)) + + def resolve(range_token: str) -> int: + return total_pages if range_token == "last" else int(range_token) # noqa: S105 + + start = resolve(start_raw) + end = resolve(end_raw) + step = 1 if end >= start else -1 + return list(range(start, end + step, 
step)) + return [int(text)] + + if isinstance(selection, Sequence) and not isinstance( + selection, (str, bytes, bytearray) + ): + expanded: list[int] = [] + for segment in selection: + expanded.extend(expand_entry(segment)) + return expanded + return expand_entry(selection) + + +def _extract_merge_entry( + entry: PdfMergeInput, +) -> tuple[PdfRestFile, PdfPageSelection | Sequence[PdfPageSelection]]: + if isinstance(entry, tuple): + return entry + if isinstance(entry, dict): + file = entry["file"] + if file is None: + msg = "PdfMergeDocument entries must include a 'file' key." + raise KeyError(msg) + pages = entry.get("pages") + selection: PdfPageSelection | Sequence[PdfPageSelection] = ( + pages if pages is not None else "1-last" + ) + return file, selection + return entry, "1-last" + + +def _fetch_page_count(client: PdfRestClient, file: PdfRestFile) -> int: + info = client.query_pdf_info(file) + assert info.page_count is not None + return int(info.page_count) + + +async def _fetch_page_count_async(client: AsyncPdfRestClient, file: PdfRestFile) -> int: + info = await client.query_pdf_info(file) + assert info.page_count is not None + return int(info.page_count) + + +@pytest.fixture(scope="module") +def uploaded_live_pdfs( + pdfrest_api_key: str, + pdfrest_live_base_url: str, +) -> tuple[PdfRestFile, PdfRestFile]: + split_source_path = get_test_resource_path("20-pages.pdf") + merge_partner_path = get_test_resource_path("report.pdf") + + with PdfRestClient( + api_key=pdfrest_api_key, + base_url=pdfrest_live_base_url, + ) as client: + split_source = client.files.create_from_paths([split_source_path])[0] + merge_partner = client.files.create_from_paths([merge_partner_path])[0] + + return split_source, merge_partner + + +@pytest.mark.parametrize( + ("page_groups", "expected_count"), + [ + pytest.param(["1-5", "6-last"], 2, id="two-ranges"), + pytest.param([["1", "3", "5"], "2-4"], 2, id="alternating-selection"), + pytest.param(["even"], 1, id="even-only"), + 
pytest.param(["9-2"], 1, id="descending-single"), + pytest.param(["odd", "even"], 2, id="odd-and-even"), + ], +) +def test_live_split_pdf_page_groups( + pdfrest_api_key: str, + pdfrest_live_base_url: str, + uploaded_live_pdfs: tuple[PdfRestFile, PdfRestFile], + page_groups: list[PdfPageSelection], + expected_count: int, +) -> None: + split_source, _ = uploaded_live_pdfs + + with PdfRestClient( + api_key=pdfrest_api_key, + base_url=pdfrest_live_base_url, + ) as client: + total_pages = _fetch_page_count(client, split_source) + + response = client.split_pdf( + split_source, + page_groups=page_groups, + output_prefix="live-split", + ) + + assert len(response.output_files) == expected_count + + output_infos = [ + client.query_pdf_info(output_file) for output_file in response.output_files + ] + + assert all( + output_file.name.startswith("live-split") + and output_file.name.endswith(".pdf") + and output_file.type == "application/pdf" + and output_file.size > 0 + for output_file in response.output_files + ) + page_counts_optional = [info.page_count for info in output_infos] + assert all(count is not None for count in page_counts_optional) + expected_page_counts = [ + len(_expand_page_selection(group, total_pages=total_pages)) + for group in page_groups + ][: len(page_counts_optional)] + page_counts = [ + int(count) for count in page_counts_optional if count is not None + ] + assert page_counts == expected_page_counts + assert str(response.input_id) == str(split_source.id) + + +def test_live_split_pdf_default_outputs( + pdfrest_api_key: str, + pdfrest_live_base_url: str, + uploaded_live_pdfs: tuple[PdfRestFile, PdfRestFile], +) -> None: + split_source, _ = uploaded_live_pdfs + + with PdfRestClient( + api_key=pdfrest_api_key, + base_url=pdfrest_live_base_url, + ) as client: + total_pages = _fetch_page_count(client, split_source) + + response = client.split_pdf( + split_source, + output_prefix="live-split-default", + ) + + assert len(response.output_files) == total_pages + + 
output_infos = [ + client.query_pdf_info(output_file) for output_file in response.output_files + ] + assert all( + output_file.name.startswith("live-split-default") + and output_file.name.endswith(".pdf") + and output_file.type == "application/pdf" + and output_file.size > 0 + for output_file in response.output_files + ) + assert all(info.page_count == 1 for info in output_infos) + + assert str(response.input_id) == str(split_source.id) + + +def test_live_split_pdf_invalid_pages( + pdfrest_api_key: str, + pdfrest_live_base_url: str, + uploaded_live_pdfs: tuple[PdfRestFile, PdfRestFile], +) -> None: + split_source, _ = uploaded_live_pdfs + + with ( + PdfRestClient( + api_key=pdfrest_api_key, + base_url=pdfrest_live_base_url, + ) as client, + pytest.raises(PdfRestApiError), + ): + client.split_pdf( + split_source, + page_groups=["1-2"], + extra_body={"pages": ["0"]}, + ) + + +def test_live_merge_pdfs_success( + pdfrest_api_key: str, + pdfrest_live_base_url: str, + uploaded_live_pdfs: tuple[PdfRestFile, PdfRestFile], +) -> None: + split_source, merge_partner = uploaded_live_pdfs + sources: list[PdfMergeInput] = [ + {"file": split_source, "pages": "odd"}, + {"file": split_source, "pages": "even"}, + {"file": merge_partner, "pages": "1"}, + ] + + with PdfRestClient( + api_key=pdfrest_api_key, + base_url=pdfrest_live_base_url, + ) as client: + source_infos = { + str(candidate.id): _fetch_page_count(client, candidate) + for candidate in (split_source, merge_partner) + } + + response = client.merge_pdfs( + sources, + output_prefix="live-merge", + ) + + assert len(response.input_ids) == len(sources) + + expected_total_pages = sum( + len( + _expand_page_selection( + selection, total_pages=source_infos[str(file.id)] + ) + ) + for file, selection in (_extract_merge_entry(entry) for entry in sources) + ) + + output_file = response.output_file + assert output_file.name.startswith("live-merge") + assert output_file.name.endswith(".pdf") + assert output_file.type == 
"application/pdf" + assert output_file.size > 0 + + output_info = client.query_pdf_info(output_file) + assert output_info.page_count == expected_total_pages + + +def test_live_merge_pdfs_invalid_pages( + pdfrest_api_key: str, + pdfrest_live_base_url: str, + uploaded_live_pdfs: tuple[PdfRestFile, PdfRestFile], +) -> None: + split_source, merge_partner = uploaded_live_pdfs + sources: list[PdfMergeInput] = [ + {"file": split_source, "pages": "even"}, + {"file": merge_partner, "pages": "1"}, + ] + + with ( + PdfRestClient( + api_key=pdfrest_api_key, + base_url=pdfrest_live_base_url, + ) as client, + pytest.raises(PdfRestApiError), + ): + client.merge_pdfs( + sources, + output_prefix="live-merge-invalid", + extra_body={"pages": ["even", "0"]}, + ) + + +@pytest.mark.asyncio +async def test_live_async_merge_pdfs( + pdfrest_api_key: str, + pdfrest_live_base_url: str, + uploaded_live_pdfs: tuple[PdfRestFile, PdfRestFile], +) -> None: + split_source, merge_partner = uploaded_live_pdfs + sources: list[PdfMergeInput] = [ + {"file": split_source, "pages": "9-2"}, + {"file": merge_partner, "pages": "1"}, + ] + + async with AsyncPdfRestClient( + api_key=pdfrest_api_key, + base_url=pdfrest_live_base_url, + ) as client: + split_page_count = await _fetch_page_count_async(client, split_source) + partner_page_count = await _fetch_page_count_async(client, merge_partner) + + response = await client.merge_pdfs( + sources, + output_prefix="live-async-merge", + ) + + source_page_counts = { + str(split_source.id): split_page_count, + str(merge_partner.id): partner_page_count, + } + expected_total_pages = sum( + len( + _expand_page_selection( + selection, total_pages=source_page_counts[str(file.id)] + ) + ) + for file, selection in (_extract_merge_entry(entry) for entry in sources) + ) + + output_file = response.output_file + assert output_file.name.startswith("live-async-merge") + assert output_file.name.endswith(".pdf") + assert output_file.type == "application/pdf" + assert 
output_file.size > 0 + + output_info = await client.query_pdf_info(output_file) + assert output_info.page_count == expected_total_pages + + +SPLIT_RANGE_CASES = [ + pytest.param("3", True, False, id="single-str"), + pytest.param(3, True, False, id="single-int"), + pytest.param("2-5", True, False, id="ascending-range"), + pytest.param("5-2", True, False, id="descending-range"), + pytest.param("even", True, False, id="even"), + pytest.param("odd", True, False, id="odd"), + pytest.param("2-last", True, False, id="to-last"), + pytest.param("last-2", True, False, id="last-desc"), + pytest.param("last", False, True, id="last"), +] + + +@pytest.mark.parametrize( + ("selection", "expect_success", "requires_override"), SPLIT_RANGE_CASES +) +def test_live_split_pdf_page_range_variants( + pdfrest_api_key: str, + pdfrest_live_base_url: str, + uploaded_live_pdfs: tuple[PdfRestFile, PdfRestFile], + selection: PdfPageSelection, + expect_success: bool, + requires_override: bool, + request: pytest.FixtureRequest, +) -> None: + split_source, _ = uploaded_live_pdfs + case_id = request.node.callspec.id + with PdfRestClient( + api_key=pdfrest_api_key, + base_url=pdfrest_live_base_url, + ) as client: + total_pages = _fetch_page_count(client, split_source) + override_body = None + if requires_override: + override_body = {"pages": [str(selection)]} + + if expect_success: + response = client.split_pdf( + split_source, + page_groups=[selection if not requires_override else "1"], + output_prefix=f"live-split-range-{case_id}", + extra_body=override_body, + ) + expected_pages = _expand_page_selection(selection, total_pages=total_pages) + output_pages = client.query_pdf_info(response.output_files[0]).page_count + assert output_pages == len(expected_pages) + else: + with pytest.raises(PdfRestApiError): + client.split_pdf( + split_source, + page_groups=[selection if not requires_override else "1"], + output_prefix=f"live-split-range-{case_id}", + extra_body=override_body, + ) + + 
+MERGE_RANGE_CASES = [ + pytest.param("3", True, False, id="single-str"), + pytest.param(3, True, False, id="single-int"), + pytest.param("2-5", True, False, id="ascending-range"), + pytest.param("5-2", True, False, id="descending-range"), + pytest.param("even", True, False, id="even"), + pytest.param("odd", True, False, id="odd"), + pytest.param("2-last", True, False, id="to-last"), + pytest.param("last-2", True, False, id="last-desc"), + pytest.param("last", False, False, id="last"), +] + + +@pytest.mark.parametrize( + ("selection", "expect_success", "requires_override"), MERGE_RANGE_CASES +) +def test_live_merge_pdf_page_range_variants( + pdfrest_api_key: str, + pdfrest_live_base_url: str, + uploaded_live_pdfs: tuple[PdfRestFile, PdfRestFile], + selection: PdfPageSelection, + expect_success: bool, + requires_override: bool, + request: pytest.FixtureRequest, +) -> None: + split_source, merge_partner = uploaded_live_pdfs + case_id = request.node.callspec.id + with PdfRestClient( + api_key=pdfrest_api_key, + base_url=pdfrest_live_base_url, + ) as client: + source_page_counts = { + str(split_source.id): _fetch_page_count(client, split_source), + str(merge_partner.id): _fetch_page_count(client, merge_partner), + } + sources: list[PdfMergeInput] = [ + { + "file": split_source, + "pages": selection if not requires_override else "1", + }, + {"file": merge_partner, "pages": "1"}, + ] + override_body = {"pages": [str(selection), "1"]} if requires_override else None + + if expect_success: + response = client.merge_pdfs( + sources, + output_prefix=f"live-merge-range-{case_id}", + extra_body=override_body, + ) + expected_total_pages = sum( + len( + _expand_page_selection( + chosen_selection, + total_pages=source_page_counts[str(file.id)], + ) + ) + for file, chosen_selection in ( + _extract_merge_entry(entry) for entry in sources + ) + ) + output_info = client.query_pdf_info(response.output_file) + assert output_info.page_count == expected_total_pages + else: + with 
pytest.raises(PdfRestApiError): + client.merge_pdfs( + sources, + output_prefix=f"live-merge-range-{case_id}", + extra_body=override_body, + ) diff --git a/tests/resources/20-pages.pdf b/tests/resources/20-pages.pdf new file mode 100644 index 00000000..d52b7683 Binary files /dev/null and b/tests/resources/20-pages.pdf differ diff --git a/tests/resources/__init__.py b/tests/resources/__init__.py new file mode 100644 index 00000000..4867293b --- /dev/null +++ b/tests/resources/__init__.py @@ -0,0 +1,7 @@ +from pathlib import Path + +here = Path(__file__).parent + + +def get_test_resource_path(*pathsegments: str) -> Path: + return here.joinpath(*pathsegments) diff --git a/tests/resources/redactable-text.pdf b/tests/resources/redactable-text.pdf new file mode 100644 index 00000000..b53e2950 Binary files /dev/null and b/tests/resources/redactable-text.pdf differ diff --git a/tests/resources/report.docx b/tests/resources/report.docx new file mode 100644 index 00000000..1effe9c6 Binary files /dev/null and b/tests/resources/report.docx differ diff --git a/tests/resources/report.pdf b/tests/resources/report.pdf new file mode 100644 index 00000000..996131f4 Binary files /dev/null and b/tests/resources/report.pdf differ diff --git a/tests/test_client.py b/tests/test_client.py new file mode 100644 index 00000000..a664de3e --- /dev/null +++ b/tests/test_client.py @@ -0,0 +1,377 @@ +from __future__ import annotations + +from datetime import date +from typing import Any + +import httpx +import pytest + +from pdfrest import ( + AsyncPdfRestClient, + PdfRestApiError, + PdfRestAuthenticationError, + PdfRestClient, + PdfRestConfigurationError, + PdfRestTimeoutError, + UpResponse, + client as client_module, +) + +VALID_API_KEY = "12345678-1234-1234-1234-123456789abc" +ANOTHER_VALID_API_KEY = "abcdefab-cdef-abcd-efab-cdefabcdef12" +ASYNC_API_KEY = "fedcba98-7654-3210-fedc-ba9876543210" + + +def _build_up_response() -> dict[str, Any]: + return { + "status": "OK", + "product": "pdfRest 
API Toolkit", + "releaseDate": "2025-09-25", + "version": "2.31.1", + } + + +def test_client_uses_provided_api_key(monkeypatch: pytest.MonkeyPatch) -> None: + monkeypatch.delenv("PDFREST_API_KEY", raising=False) + + def handler(request: httpx.Request) -> httpx.Response: + assert request.headers["Api-Key"] == VALID_API_KEY + assert request.url.path == "/up" + return httpx.Response(200, json=_build_up_response()) + + transport = httpx.MockTransport(handler) + with PdfRestClient(api_key=VALID_API_KEY, transport=transport) as client: + response = client.up() + + assert isinstance(response, UpResponse) + assert response.release_date == date(2025, 9, 25) + + +def test_client_reads_api_key_from_env(monkeypatch: pytest.MonkeyPatch) -> None: + monkeypatch.setenv("PDFREST_API_KEY", VALID_API_KEY) + + def handler(request: httpx.Request) -> httpx.Response: + assert request.headers["Api-Key"] == VALID_API_KEY + assert request.url.host == "example.com" + return httpx.Response(200, json=_build_up_response()) + + transport = httpx.MockTransport(handler) + with PdfRestClient(base_url="https://example.com", transport=transport) as client: + response = client.up() + + assert response.product == "pdfRest API Toolkit" + + +def test_client_sets_sdk_headers(monkeypatch: pytest.MonkeyPatch) -> None: + monkeypatch.setenv("PDFREST_API_KEY", VALID_API_KEY) + monkeypatch.setattr(client_module.importlib.metadata, "version", lambda _: "1.2.3") + + def handler(request: httpx.Request) -> httpx.Response: + assert request.headers["wsn"] == "pdfrest-python" + assert request.headers["User-Agent"] == "pdfrest-python-sdk/1.2.3" + assert request.headers["Accept"] == "application/json" + return httpx.Response(200, json=_build_up_response()) + + transport = httpx.MockTransport(handler) + with PdfRestClient(api_key=VALID_API_KEY, transport=transport) as client: + client.up() + + +@pytest.mark.asyncio +async def test_async_client_sets_sdk_headers( + monkeypatch: pytest.MonkeyPatch, +) -> None: + 
monkeypatch.setenv("PDFREST_API_KEY", ASYNC_API_KEY) + monkeypatch.setattr(client_module.importlib.metadata, "version", lambda _: "4.5.6") + + def handler(request: httpx.Request) -> httpx.Response: + assert request.headers["wsn"] == "pdfrest-python" + assert request.headers["User-Agent"] == "pdfrest-python-sdk/4.5.6" + assert request.headers["Accept"] == "application/json" + return httpx.Response(200, json=_build_up_response()) + + transport = httpx.MockTransport(handler) + async with AsyncPdfRestClient(api_key=ASYNC_API_KEY, transport=transport) as client: + await client.up() + + +def test_missing_api_key_raises(monkeypatch: pytest.MonkeyPatch) -> None: + monkeypatch.delenv("PDFREST_API_KEY", raising=False) + + with pytest.raises(PdfRestConfigurationError): + PdfRestClient() + + +def test_invalid_length_api_key_raises(monkeypatch: pytest.MonkeyPatch) -> None: + monkeypatch.delenv("PDFREST_API_KEY", raising=False) + + with pytest.raises(PdfRestConfigurationError): + PdfRestClient(api_key="too-short") + + +def test_invalid_uuid_api_key_raises(monkeypatch: pytest.MonkeyPatch) -> None: + monkeypatch.delenv("PDFREST_API_KEY", raising=False) + bad_uuid = "12345678-1234-1234-1234-123456789abz" + + with pytest.raises(PdfRestConfigurationError): + PdfRestClient(api_key=bad_uuid) + + +def test_client_allows_missing_api_key_for_custom_host( + monkeypatch: pytest.MonkeyPatch, +) -> None: + monkeypatch.delenv("PDFREST_API_KEY", raising=False) + + def handler(request: httpx.Request) -> httpx.Response: + assert "Api-Key" not in request.headers + assert request.url.host == "internal.example" + return httpx.Response(200, json=_build_up_response()) + + transport = httpx.MockTransport(handler) + with PdfRestClient( + base_url="https://internal.example", transport=transport + ) as client: + response = client.up() + + assert response.status == "OK" + + +def test_up_with_custom_headers(monkeypatch: pytest.MonkeyPatch) -> None: + monkeypatch.delenv("PDFREST_API_KEY", raising=False) + + 
def handler(request: httpx.Request) -> httpx.Response: + assert request.headers["Api-Key"] == ANOTHER_VALID_API_KEY + assert request.headers["X-Test-Header"] == "value" + return httpx.Response(200, json=_build_up_response()) + + transport = httpx.MockTransport(handler) + with PdfRestClient(api_key=ANOTHER_VALID_API_KEY, transport=transport) as client: + response = client.up(extra_headers={"X-Test-Header": "value"}) + + assert response.version == "2.31.1" + + +def test_up_with_query_and_timeout(monkeypatch: pytest.MonkeyPatch) -> None: + monkeypatch.setenv("PDFREST_API_KEY", VALID_API_KEY) + + captured_timeout: dict[str, float | None] = {} + + def handler(request: httpx.Request) -> httpx.Response: + assert request.url.params["view"] == "full" + captured_timeout["value"] = request.extensions.get("timeout") + return httpx.Response(200, json=_build_up_response()) + + transport = httpx.MockTransport(handler) + with PdfRestClient(transport=transport) as client: + response = client.up( + extra_query={"view": "full", "unused": None}, + timeout=0.5, + ) + + assert response.product == "pdfRest API Toolkit" + timeout_value = captured_timeout["value"] + assert timeout_value is not None + if isinstance(timeout_value, dict): + assert all( + component == pytest.approx(0.5) for component in timeout_value.values() + ) + else: + assert timeout_value == pytest.approx(0.5) + + +def test_up_rejects_extra_body(monkeypatch: pytest.MonkeyPatch) -> None: + monkeypatch.setenv("PDFREST_API_KEY", VALID_API_KEY) + + def handler(_: httpx.Request) -> httpx.Response: + return httpx.Response(200, json=_build_up_response()) + + transport = httpx.MockTransport(handler) + with ( + pytest.raises(PdfRestConfigurationError), + PdfRestClient(transport=transport) as client, + ): + client.up(extra_body={"unexpected": "value"}) + + +def test_prepare_request_merges_queries(monkeypatch: pytest.MonkeyPatch) -> None: + monkeypatch.setenv("PDFREST_API_KEY", "key") + with PdfRestClient(api_key=VALID_API_KEY) as 
client: + request = client.prepare_request( + "GET", + "/test", + query={"base": "value", "skip": None}, + extra_query={"base": "override", "extra": 42, "ignore": None}, + ) + + assert request.params == { + "base": "override", + "extra": 42, + "skip": None, + "ignore": None, + } + + +def test_prepare_request_rejects_files_with_json( + monkeypatch: pytest.MonkeyPatch, +) -> None: + monkeypatch.setenv("PDFREST_API_KEY", VALID_API_KEY) + with ( + PdfRestClient(api_key=VALID_API_KEY) as client, + pytest.raises( + PdfRestConfigurationError, + match="JSON payloads cannot be combined with multipart file uploads", + ), + ): + client.prepare_request( + "POST", + "/upload", + json_body={"foo": "bar"}, + files=[("file", b"data")], + ) + + +def test_authentication_error_raises_specific_exception( + monkeypatch: pytest.MonkeyPatch, +) -> None: + monkeypatch.setenv("PDFREST_API_KEY", VALID_API_KEY) + + def handler(_: httpx.Request) -> httpx.Response: + return httpx.Response(401, json={"message": "The provided key is not valid."}) + + transport = httpx.MockTransport(handler) + with ( + pytest.raises(PdfRestAuthenticationError) as exc_info, + PdfRestClient(transport=transport) as client, + ): + client.up() + assert "The provided key is not valid." in str(exc_info.value) + + +def test_authentication_error_handles_non_json( + monkeypatch: pytest.MonkeyPatch, +) -> None: + monkeypatch.setenv("PDFREST_API_KEY", VALID_API_KEY) + + def handler(_: httpx.Request) -> httpx.Response: + return httpx.Response(401, text="Unauthorized") + + transport = httpx.MockTransport(handler) + with ( + pytest.raises(PdfRestAuthenticationError) as exc_info, + PdfRestClient(transport=transport) as client, + ): + client.up() + assert "Authentication with pdfRest failed." 
in str(exc_info.value) + assert exc_info.value.response_content == "Unauthorized" + + +def test_client_raises_for_non_success_response( + monkeypatch: pytest.MonkeyPatch, +) -> None: + monkeypatch.setenv("PDFREST_API_KEY", VALID_API_KEY) + + def handler(_: httpx.Request) -> httpx.Response: + return httpx.Response(500, json={"message": "server error"}) + + transport = httpx.MockTransport(handler) + with ( + pytest.raises(PdfRestApiError) as exc_info, + PdfRestClient(transport=transport) as client, + ): + client.up() + assert exc_info.value.status_code == 500 + + +@pytest.mark.asyncio +async def test_async_client_up(monkeypatch: pytest.MonkeyPatch) -> None: + monkeypatch.setenv("PDFREST_API_KEY", ASYNC_API_KEY) + + def handler(request: httpx.Request) -> httpx.Response: + assert request.headers["Api-Key"] == ASYNC_API_KEY + return httpx.Response(200, json=_build_up_response()) + + transport = httpx.MockTransport(handler) + async with AsyncPdfRestClient(transport=transport) as client: + response = await client.up() + + assert response.status == "OK" + + +@pytest.mark.asyncio +async def test_async_up_with_query_and_timeout( + monkeypatch: pytest.MonkeyPatch, +) -> None: + monkeypatch.setenv("PDFREST_API_KEY", ASYNC_API_KEY) + + captured_timeout: dict[str, float | None] = {} + + def handler(request: httpx.Request) -> httpx.Response: + assert request.url.params["mode"] == "ping" + captured_timeout["value"] = request.extensions.get("timeout") + return httpx.Response(200, json=_build_up_response()) + + transport = httpx.MockTransport(handler) + async with AsyncPdfRestClient(transport=transport) as client: + response = await client.up( + extra_query={"mode": "ping"}, + timeout=0.25, + ) + + assert response.status == "OK" + timeout_value = captured_timeout["value"] + assert timeout_value is not None + if isinstance(timeout_value, dict): + assert all( + component == pytest.approx(0.25) for component in timeout_value.values() + ) + else: + assert timeout_value == 
pytest.approx(0.25) + + +@pytest.mark.asyncio +async def test_async_client_translates_timeout(monkeypatch: pytest.MonkeyPatch) -> None: + monkeypatch.setenv("PDFREST_API_KEY", ASYNC_API_KEY) + + def handler(_: httpx.Request) -> httpx.Response: + message = "timeout" + raise httpx.TimeoutException(message) + + transport = httpx.MockTransport(handler) + with pytest.raises(PdfRestTimeoutError): + async with AsyncPdfRestClient(transport=transport) as client: + await client.up() + + +@pytest.mark.asyncio +async def test_async_up_rejects_extra_body( + monkeypatch: pytest.MonkeyPatch, +) -> None: + monkeypatch.setenv("PDFREST_API_KEY", ASYNC_API_KEY) + + def handler(_: httpx.Request) -> httpx.Response: + return httpx.Response(200, json=_build_up_response()) + + transport = httpx.MockTransport(handler) + with pytest.raises(PdfRestConfigurationError): + async with AsyncPdfRestClient(transport=transport) as client: + await client.up(extra_body={"unexpected": "value"}) + + +def test_live_client_up(pdfrest_api_key: str, pdfrest_live_base_url: str) -> None: + with PdfRestClient( + api_key=pdfrest_api_key, base_url=pdfrest_live_base_url + ) as client: + response = client.up() + assert response.status.upper() == "OK" + assert response.product + + +@pytest.mark.asyncio +async def test_live_async_client_up( + pdfrest_api_key: str, pdfrest_live_base_url: str +) -> None: + async with AsyncPdfRestClient( + api_key=pdfrest_api_key, base_url=pdfrest_live_base_url + ) as client: + response = await client.up() + assert response.version diff --git a/tests/test_convert_to_bmp.py b/tests/test_convert_to_bmp.py new file mode 100644 index 00000000..a07eb749 --- /dev/null +++ b/tests/test_convert_to_bmp.py @@ -0,0 +1,558 @@ +from __future__ import annotations + +import json +import re + +import httpx +import pytest +from pydantic import ValidationError + +from pdfrest import AsyncPdfRestClient, PdfRestClient +from pdfrest.models import PdfRestFileBasedResponse, PdfRestFileID +from 
pdfrest.models._internal import BmpPdfRestPayload + +from .graphics_test_helpers import ( + ASYNC_API_KEY, + VALID_API_KEY, + assert_conversion_payload, + build_file_info_payload, + make_pdf_file, +) +from .resources import get_test_resource_path + + +@pytest.mark.parametrize("color_model", ["rgb", "gray"]) +def test_convert_to_bmp_success( + monkeypatch: pytest.MonkeyPatch, color_model: str +) -> None: + monkeypatch.delenv("PDFREST_API_KEY", raising=False) + input_file = make_pdf_file(PdfRestFileID.generate(1)) + output_id = str(PdfRestFileID.generate()) + expected_name = f"converted-{color_model}-001.bmp" + + request_payload = BmpPdfRestPayload.model_validate( + { + "files": [input_file], + "output_prefix": "converted", + "page_range": ["1", "2-3"], + "resolution": 600, + "color_model": color_model, + "smoothing": ["text", "image"], + } + ).model_dump(mode="json", by_alias=True) + + seen: dict[str, int] = {"post": 0, "get": 0} + + def handler(request: httpx.Request) -> httpx.Response: + if request.method == "POST" and request.url.path == "/bmp": + seen["post"] += 1 + payload = json.loads(request.content.decode("utf-8")) + assert_conversion_payload(payload, request_payload) + return httpx.Response( + 200, + json={"inputId": [input_file.id], "outputId": [output_id]}, + ) + if request.method == "GET" and request.url.path == f"/resource/{output_id}": + seen["get"] += 1 + return httpx.Response( + 200, + json=build_file_info_payload(output_id, expected_name, "image/bmp"), + ) + msg = f"Unexpected request {request.method} {request.url}" + raise AssertionError(msg) + + transport = httpx.MockTransport(handler) + with PdfRestClient(api_key=VALID_API_KEY, transport=transport) as client: + response = client.convert_to_bmp( + input_file, + output_prefix="converted", + page_range=["1", "2-3"], + resolution=600, + color_model=color_model, # type: ignore[arg-type] + smoothing=["text", "image"], + ) + + assert seen == {"post": 1, "get": 1} + output_file = response.output_files[0] 
+ assert output_file.name == expected_name + assert output_file.type == "image/bmp" + assert output_file.size == 256 + assert str(output_file.url).endswith(output_id) + assert response.warning is None + + +def test_convert_to_bmp_defaults_excluded(monkeypatch: pytest.MonkeyPatch) -> None: + monkeypatch.delenv("PDFREST_API_KEY", raising=False) + input_file = make_pdf_file(PdfRestFileID.generate(1)) + output_id = "2c3d4e5f-2345-4bcd-8ef0-abcdefabcdef" + + request_payload = BmpPdfRestPayload.model_validate( + {"files": input_file} + ).model_dump(mode="json", by_alias=True, exclude_none=True, exclude_defaults=True) + + def handler(request: httpx.Request) -> httpx.Response: + if request.method == "POST" and request.url.path == "/bmp": + payload = json.loads(request.content.decode("utf-8")) + assert_conversion_payload(payload, request_payload) + return httpx.Response( + 200, + json={"inputId": [input_file.id], "outputId": [output_id]}, + ) + if request.method == "GET" and request.url.path == f"/resource/{output_id}": + return httpx.Response( + 200, + json=build_file_info_payload(output_id, "example-001.bmp", "image/bmp"), + ) + msg = f"Unexpected request {request.method} {request.url}" + raise AssertionError(msg) + + transport = httpx.MockTransport(handler) + with PdfRestClient(api_key=VALID_API_KEY, transport=transport) as client: + response = client.convert_to_bmp(input_file) + + output_file = response.output_files[0] + assert output_file.name == "example-001.bmp" + assert output_file.type == "image/bmp" + assert response.warning is None + + +@pytest.mark.parametrize("resolution", [12, 2400]) +def test_convert_to_bmp_resolution_limits( + monkeypatch: pytest.MonkeyPatch, resolution: int +) -> None: + monkeypatch.delenv("PDFREST_API_KEY", raising=False) + input_file = make_pdf_file(PdfRestFileID.generate(1)) + output_id = str(PdfRestFileID.generate()) + + request_payload = BmpPdfRestPayload.model_validate( + { + "files": [input_file], + "resolution": resolution, + } + 
).model_dump(mode="json", by_alias=True, exclude_none=True, exclude_defaults=True) + + seen: dict[str, int] = {"post": 0, "get": 0} + + def handler(request: httpx.Request) -> httpx.Response: + if request.method == "POST" and request.url.path == "/bmp": + seen["post"] += 1 + payload = json.loads(request.content.decode("utf-8")) + assert_conversion_payload(payload, request_payload) + return httpx.Response( + 200, + json={"inputId": [input_file.id], "outputId": [output_id]}, + ) + if request.method == "GET" and request.url.path == f"/resource/{output_id}": + seen["get"] += 1 + return httpx.Response( + 200, + json=build_file_info_payload( + output_id, f"example-resolution-{resolution}.bmp", "image/bmp" + ), + ) + msg = f"Unexpected request {request.method} {request.url}" + raise AssertionError(msg) + + transport = httpx.MockTransport(handler) + with PdfRestClient(api_key=VALID_API_KEY, transport=transport) as client: + response = client.convert_to_bmp( + input_file, + resolution=resolution, + ) + + assert seen == {"post": 1, "get": 1} + assert response.output_files[0].name == f"example-resolution-{resolution}.bmp" + + +@pytest.mark.parametrize("invalid_resolution", [11, 2401]) +def test_convert_to_bmp_resolution_out_of_bounds( + monkeypatch: pytest.MonkeyPatch, invalid_resolution: int +) -> None: + monkeypatch.delenv("PDFREST_API_KEY", raising=False) + + def handler(_: httpx.Request) -> httpx.Response: + pytest.fail("Request should not be sent when validation fails.") + + transport = httpx.MockTransport(handler) + with ( + PdfRestClient(api_key=VALID_API_KEY, transport=transport) as client, + pytest.raises( + ValidationError, + match=r"less than or equal to 2400|greater than or equal to 12", + ), + ): + client.convert_to_bmp( + make_pdf_file(PdfRestFileID.generate(1)), + resolution=invalid_resolution, + ) + + +@pytest.mark.parametrize( + "invalid_color", + [ + pytest.param("rgba", id="rgba"), + pytest.param("cmyk", id="cmyk"), + pytest.param("lab", id="lab"), + ], +) 
+def test_convert_to_bmp_invalid_color_models( + monkeypatch: pytest.MonkeyPatch, invalid_color: str +) -> None: + monkeypatch.delenv("PDFREST_API_KEY", raising=False) + + def handler(_: httpx.Request) -> httpx.Response: + pytest.fail("Request should not be sent when validation fails.") + + transport = httpx.MockTransport(handler) + with ( + PdfRestClient(api_key=VALID_API_KEY, transport=transport) as client, + pytest.raises( + ValidationError, + match=re.escape("Input should be 'rgb' or 'gray'"), + ), + ): + client.convert_to_bmp( + make_pdf_file(PdfRestFileID.generate(1)), + color_model=invalid_color, # type: ignore[arg-type] + ) + + +@pytest.mark.asyncio +async def test_async_convert_to_bmp_success( + monkeypatch: pytest.MonkeyPatch, +) -> None: + monkeypatch.delenv("PDFREST_API_KEY", raising=False) + input_file = make_pdf_file(PdfRestFileID.generate(1)) + output_id = "3d4e5f60-3456-4cde-8f01-cdefabcdef12" + + request_payload = BmpPdfRestPayload.model_validate( + { + "files": [input_file], + "output_prefix": "async-output", + "page_range": "1-2", + "resolution": 450, + "color_model": "gray", + "smoothing": ["all"], + } + ).model_dump(mode="json", by_alias=True) + + seen: dict[str, int] = {"post": 0, "get": 0} + + def handler(request: httpx.Request) -> httpx.Response: + if request.method == "POST" and request.url.path == "/bmp": + seen["post"] += 1 + payload = json.loads(request.content.decode("utf-8")) + assert_conversion_payload(payload, request_payload) + return httpx.Response( + 200, + json={"inputId": [input_file.id], "outputId": [output_id]}, + ) + if request.method == "GET" and request.url.path == f"/resource/{output_id}": + seen["get"] += 1 + assert request.url.params["format"] == "info" + return httpx.Response( + 200, + json=build_file_info_payload( + output_id, "async-output-001.bmp", "image/bmp" + ), + ) + msg = f"Unexpected request {request.method} {request.url}" + raise AssertionError(msg) + + transport = httpx.MockTransport(handler) + async with 
AsyncPdfRestClient(api_key=ASYNC_API_KEY, transport=transport) as client: + response = await client.convert_to_bmp( + [input_file], + output_prefix="async-output", + page_range="1-2", + resolution=450, + color_model="gray", + smoothing=["all"], + ) + + assert seen == {"post": 1, "get": 1} + assert isinstance(response, PdfRestFileBasedResponse) + output_file = response.output_files[0] + assert output_file.name == "async-output-001.bmp" + assert output_file.type == "image/bmp" + assert output_file.size == 256 + assert str(output_file.url).endswith(output_id) + assert response.warning is None + + +def test_convert_to_bmp_request_customization( + monkeypatch: pytest.MonkeyPatch, +) -> None: + monkeypatch.delenv("PDFREST_API_KEY", raising=False) + input_file = make_pdf_file(PdfRestFileID.generate(1)) + output_id = "4e5f6071-4567-4def-90ab-abcdefabcdef" + captured_timeout: dict[str, float | dict[str, float] | None] = {} + + def handler(request: httpx.Request) -> httpx.Response: + if request.method == "POST" and request.url.path == "/bmp": + assert request.url.params["trace"] == "true" + assert request.headers["X-Debug"] == "bmp" + captured_timeout["value"] = request.extensions.get("timeout") + payload = json.loads(request.content.decode("utf-8")) + assert payload["debug"] is True + assert payload["resolution"] == 500 + assert payload["id"] == str(input_file.id) + return httpx.Response( + 200, + json={"inputId": [input_file.id], "outputId": [output_id]}, + ) + if request.method == "GET" and request.url.path == f"/resource/{output_id}": + assert request.url.params["format"] == "info" + assert request.url.params["trace"] == "true" + assert request.headers["X-Debug"] == "bmp" + return httpx.Response( + 200, + json=build_file_info_payload(output_id, "custom-001.bmp", "image/bmp"), + ) + msg = f"Unexpected request {request.method} {request.url}" + raise AssertionError(msg) + + transport = httpx.MockTransport(handler) + with PdfRestClient(api_key=VALID_API_KEY, 
transport=transport) as client: + response = client.convert_to_bmp( + input_file, + resolution=500, + extra_query={"trace": "true"}, + extra_headers={"X-Debug": "bmp"}, + extra_body={"debug": True}, + timeout=0.4, + ) + + assert response.output_files[0].name == "custom-001.bmp" + timeout_value = captured_timeout["value"] + assert timeout_value is not None + if isinstance(timeout_value, dict): + assert all( + component == pytest.approx(0.4) for component in timeout_value.values() + ) + else: + assert timeout_value == pytest.approx(0.4) + + +def test_convert_to_bmp_validation_error(monkeypatch: pytest.MonkeyPatch) -> None: + monkeypatch.delenv("PDFREST_API_KEY", raising=False) + + def handler(_: httpx.Request) -> httpx.Response: + pytest.fail("Request should not be sent when validation fails.") + + transport = httpx.MockTransport(handler) + with ( + PdfRestClient(api_key=VALID_API_KEY, transport=transport) as client, + pytest.raises( + ValidationError, + match="less than or equal to 2400", + ), + ): + client.convert_to_bmp( + make_pdf_file(PdfRestFileID.generate(1)), + resolution=5000, + ) + + +def test_convert_to_bmp_invalid_smoothing_value( + monkeypatch: pytest.MonkeyPatch, +) -> None: + monkeypatch.delenv("PDFREST_API_KEY", raising=False) + + def handler(_: httpx.Request) -> httpx.Response: + pytest.fail("Request should not be sent when validation fails.") + + transport = httpx.MockTransport(handler) + with ( + PdfRestClient(api_key=VALID_API_KEY, transport=transport) as client, + pytest.raises( + ValidationError, + match=re.escape("Input should be 'none', 'all', 'text', 'line' or 'image'"), + ), + ): + client.convert_to_bmp( + make_pdf_file(PdfRestFileID.generate(1)), + smoothing="invalid", # type: ignore[arg-type] + ) + + +def test_convert_to_bmp_multiple_files_rejected( + monkeypatch: pytest.MonkeyPatch, +) -> None: + monkeypatch.delenv("PDFREST_API_KEY", raising=False) + + def handler(_: httpx.Request) -> httpx.Response: + pytest.fail("Request should not be 
sent when validation fails.") + + first = make_pdf_file(PdfRestFileID.generate(1)) + second = make_pdf_file(PdfRestFileID.generate(1)) + transport = httpx.MockTransport(handler) + with ( + PdfRestClient(api_key=VALID_API_KEY, transport=transport) as client, + pytest.raises( + ValidationError, + match=re.escape("List should have at most 1 item after validation"), + ), + ): + client.convert_to_bmp([first, second]) + + +def test_convert_to_bmp_empty_page_range_rejected( + monkeypatch: pytest.MonkeyPatch, +) -> None: + monkeypatch.delenv("PDFREST_API_KEY", raising=False) + + def handler(_: httpx.Request) -> httpx.Response: + pytest.fail("Request should not be sent when validation fails.") + + transport = httpx.MockTransport(handler) + with ( + PdfRestClient(api_key=VALID_API_KEY, transport=transport) as client, + pytest.raises( + ValidationError, + match=re.escape("List should have at least 1 item after validation"), + ), + ): + client.convert_to_bmp( + make_pdf_file(PdfRestFileID.generate(1)), + page_range=[], + ) + + +def test_convert_to_bmp_sequence_arguments(monkeypatch: pytest.MonkeyPatch) -> None: + monkeypatch.delenv("PDFREST_API_KEY", raising=False) + input_file = make_pdf_file(PdfRestFileID.generate(1)) + output_id = "5f607182-5678-4ef0-91bc-cdefabcdef99" + + request_payload = BmpPdfRestPayload.model_validate( + { + "files": [input_file], + "page_range": "1, 3", + "smoothing": "text", + } + ).model_dump(mode="json", by_alias=True, exclude_none=True, exclude_defaults=True) + + seen: dict[str, int] = {"post": 0, "get": 0} + + def handler(request: httpx.Request) -> httpx.Response: + if request.method == "POST" and request.url.path == "/bmp": + seen["post"] += 1 + payload = json.loads(request.content.decode("utf-8")) + assert_conversion_payload(payload, request_payload) + return httpx.Response( + 200, + json={"inputId": [input_file.id], "outputId": [output_id]}, + ) + if request.method == "GET" and request.url.path == f"/resource/{output_id}": + seen["get"] += 1 + 
return httpx.Response( + 200, + json=build_file_info_payload(output_id, "example-001.bmp", "image/bmp"), + ) + msg = f"Unexpected request {request.method} {request.url}" + raise AssertionError(msg) + + transport = httpx.MockTransport(handler) + with PdfRestClient(api_key=VALID_API_KEY, transport=transport) as client: + response = client.convert_to_bmp( + [input_file], + page_range="1, 3", + smoothing="text", + ) + + assert seen == {"post": 1, "get": 1} + assert response.output_files[0].name == "example-001.bmp" + + +@pytest.mark.asyncio +async def test_async_convert_to_bmp_request_customization( + monkeypatch: pytest.MonkeyPatch, +) -> None: + monkeypatch.delenv("PDFREST_API_KEY", raising=False) + input_file = make_pdf_file(PdfRestFileID.generate(1)) + output_id = "6a7b8c9d-6789-4012-a1b2-c3d4e5f6a7b8" + captured_timeout: dict[str, float | dict[str, float] | None] = {} + + def handler(request: httpx.Request) -> httpx.Response: + if request.method == "POST" and request.url.path == "/bmp": + assert request.url.params["trace"] == "true" + assert request.headers["X-Debug"] == "async-bmp" + captured_timeout["value"] = request.extensions.get("timeout") + payload = json.loads(request.content.decode("utf-8")) + assert payload["debug"] is True + assert payload["resolution"] == 475 + assert payload["id"] == str(input_file.id) + return httpx.Response( + 200, + json={"inputId": [input_file.id], "outputId": [output_id]}, + ) + if request.method == "GET" and request.url.path == f"/resource/{output_id}": + assert request.url.params["trace"] == "true" + assert request.headers["X-Debug"] == "async-bmp" + assert request.url.params["format"] == "info" + return httpx.Response( + 200, + json=build_file_info_payload( + output_id, "async-custom-001.bmp", "image/bmp" + ), + ) + msg = f"Unexpected request {request.method} {request.url}" + raise AssertionError(msg) + + transport = httpx.MockTransport(handler) + async with AsyncPdfRestClient(api_key=ASYNC_API_KEY, transport=transport) as 
client: + response = await client.convert_to_bmp( + input_file, + resolution=475, + extra_query={"trace": "true"}, + extra_headers={"X-Debug": "async-bmp"}, + extra_body={"debug": True}, + timeout=0.55, + ) + + assert response.output_files[0].name == "async-custom-001.bmp" + timeout_value = captured_timeout["value"] + assert timeout_value is not None + if isinstance(timeout_value, dict): + assert all( + component == pytest.approx(0.55) for component in timeout_value.values() + ) + else: + assert timeout_value == pytest.approx(0.55) + + +def test_live_convert_to_bmp( + pdfrest_api_key: str, + pdfrest_live_base_url: str, +) -> None: + resource = get_test_resource_path("report.pdf") + with PdfRestClient( + api_key=pdfrest_api_key, base_url=pdfrest_live_base_url + ) as client: + uploaded = client.files.create_from_paths([resource]) + response = client.convert_to_bmp( + uploaded[0], + output_prefix="live-bmp", + page_range="1", + ) + assert isinstance(response, PdfRestFileBasedResponse) + assert response.output_files + + +@pytest.mark.asyncio +async def test_live_async_convert_to_bmp( + pdfrest_api_key: str, + pdfrest_live_base_url: str, +) -> None: + resource = get_test_resource_path("report.pdf") + async with AsyncPdfRestClient( + api_key=pdfrest_api_key, base_url=pdfrest_live_base_url + ) as client: + uploaded = await client.files.create_from_paths([resource]) + response = await client.convert_to_bmp( + uploaded[0], + output_prefix="live-async-bmp", + page_range="1", + ) + assert isinstance(response, PdfRestFileBasedResponse) + assert response.output_files diff --git a/tests/test_convert_to_gif.py b/tests/test_convert_to_gif.py new file mode 100644 index 00000000..00569c67 --- /dev/null +++ b/tests/test_convert_to_gif.py @@ -0,0 +1,551 @@ +from __future__ import annotations + +import json +import re + +import httpx +import pytest +from pydantic import ValidationError + +from pdfrest import AsyncPdfRestClient, PdfRestClient +from pdfrest.models import 
PdfRestFileBasedResponse, PdfRestFileID +from pdfrest.models._internal import GifPdfRestPayload + +from .graphics_test_helpers import ( + ASYNC_API_KEY, + VALID_API_KEY, + assert_conversion_payload, + build_file_info_payload, + make_pdf_file, +) +from .resources import get_test_resource_path + + +@pytest.mark.parametrize("color_model", ["rgb", "gray"]) +def test_convert_to_gif_success( + monkeypatch: pytest.MonkeyPatch, color_model: str +) -> None: + monkeypatch.delenv("PDFREST_API_KEY", raising=False) + input_file = make_pdf_file(PdfRestFileID.generate(1)) + output_id = str(PdfRestFileID.generate()) + expected_name = f"converted-{color_model}-001.gif" + + request_payload = GifPdfRestPayload.model_validate( + { + "files": [input_file], + "output_prefix": "converted", + "page_range": ["1", "3-4"], + "resolution": 500, + "color_model": color_model, + "smoothing": ["text", "image"], + } + ).model_dump(mode="json", by_alias=True) + + seen: dict[str, int] = {"post": 0, "get": 0} + + def handler(request: httpx.Request) -> httpx.Response: + if request.method == "POST" and request.url.path == "/gif": + seen["post"] += 1 + payload = json.loads(request.content.decode("utf-8")) + assert_conversion_payload(payload, request_payload) + return httpx.Response( + 200, + json={"inputId": [input_file.id], "outputId": [output_id]}, + ) + if request.method == "GET" and request.url.path == f"/resource/{output_id}": + seen["get"] += 1 + return httpx.Response( + 200, + json=build_file_info_payload(output_id, expected_name, "image/gif"), + ) + msg = f"Unexpected request {request.method} {request.url}" + raise AssertionError(msg) + + transport = httpx.MockTransport(handler) + with PdfRestClient(api_key=VALID_API_KEY, transport=transport) as client: + response = client.convert_to_gif( + input_file, + output_prefix="converted", + page_range=["1", "3-4"], + resolution=500, + color_model=color_model, # type: ignore[arg-type] + smoothing=["text", "image"], + ) + + assert seen == {"post": 1, 
"get": 1} + output_file = response.output_files[0] + assert output_file.name == expected_name + assert output_file.type == "image/gif" + assert str(output_file.url).endswith(output_id) + + +def test_convert_to_gif_defaults_excluded(monkeypatch: pytest.MonkeyPatch) -> None: + monkeypatch.delenv("PDFREST_API_KEY", raising=False) + input_file = make_pdf_file(PdfRestFileID.generate(1)) + output_id = "5b6c7d8e-2345-4bcd-9ef0-abcdefabcdef" + + request_payload = GifPdfRestPayload.model_validate( + {"files": input_file} + ).model_dump(mode="json", by_alias=True, exclude_none=True, exclude_defaults=True) + + def handler(request: httpx.Request) -> httpx.Response: + if request.method == "POST" and request.url.path == "/gif": + payload = json.loads(request.content.decode("utf-8")) + assert_conversion_payload(payload, request_payload) + return httpx.Response( + 200, + json={"inputId": [input_file.id], "outputId": [output_id]}, + ) + if request.method == "GET" and request.url.path == f"/resource/{output_id}": + return httpx.Response( + 200, + json=build_file_info_payload(output_id, "example-001.gif", "image/gif"), + ) + msg = f"Unexpected request {request.method} {request.url}" + raise AssertionError(msg) + + transport = httpx.MockTransport(handler) + with PdfRestClient(api_key=VALID_API_KEY, transport=transport) as client: + response = client.convert_to_gif(input_file) + + output_file = response.output_files[0] + assert output_file.name == "example-001.gif" + assert output_file.type == "image/gif" + + +@pytest.mark.parametrize("resolution", [12, 2400]) +def test_convert_to_gif_resolution_limits( + monkeypatch: pytest.MonkeyPatch, resolution: int +) -> None: + monkeypatch.delenv("PDFREST_API_KEY", raising=False) + input_file = make_pdf_file(PdfRestFileID.generate(1)) + output_id = str(PdfRestFileID.generate()) + + request_payload = GifPdfRestPayload.model_validate( + { + "files": [input_file], + "resolution": resolution, + } + ).model_dump(mode="json", by_alias=True, 
exclude_none=True, exclude_defaults=True) + + seen: dict[str, int] = {"post": 0, "get": 0} + + def handler(request: httpx.Request) -> httpx.Response: + if request.method == "POST" and request.url.path == "/gif": + seen["post"] += 1 + payload = json.loads(request.content.decode("utf-8")) + assert_conversion_payload(payload, request_payload) + return httpx.Response( + 200, + json={"inputId": [input_file.id], "outputId": [output_id]}, + ) + if request.method == "GET" and request.url.path == f"/resource/{output_id}": + seen["get"] += 1 + return httpx.Response( + 200, + json=build_file_info_payload( + output_id, f"example-resolution-{resolution}.gif", "image/gif" + ), + ) + msg = f"Unexpected request {request.method} {request.url}" + raise AssertionError(msg) + + transport = httpx.MockTransport(handler) + with PdfRestClient(api_key=VALID_API_KEY, transport=transport) as client: + response = client.convert_to_gif( + input_file, + resolution=resolution, + ) + + assert seen == {"post": 1, "get": 1} + assert response.output_files[0].name == f"example-resolution-{resolution}.gif" + + +@pytest.mark.parametrize( + "invalid_color", + [ + pytest.param("rgba", id="rgba"), + pytest.param("cmyk", id="cmyk"), + pytest.param("lab", id="lab"), + ], +) +def test_convert_to_gif_invalid_color_model( + monkeypatch: pytest.MonkeyPatch, invalid_color: str +) -> None: + monkeypatch.delenv("PDFREST_API_KEY", raising=False) + + def handler(_: httpx.Request) -> httpx.Response: + pytest.fail("Request should not be sent when validation fails.") + + transport = httpx.MockTransport(handler) + with ( + PdfRestClient(api_key=VALID_API_KEY, transport=transport) as client, + pytest.raises( + ValidationError, + match=re.escape("Input should be 'rgb' or 'gray'"), + ), + ): + client.convert_to_gif( + make_pdf_file(PdfRestFileID.generate(1)), + color_model=invalid_color, # type: ignore[arg-type] + ) + + +@pytest.mark.parametrize("invalid_resolution", [11, 2401]) +def 
test_convert_to_gif_resolution_out_of_bounds( + monkeypatch: pytest.MonkeyPatch, invalid_resolution: int +) -> None: + monkeypatch.delenv("PDFREST_API_KEY", raising=False) + + def handler(_: httpx.Request) -> httpx.Response: + pytest.fail("Request should not be sent when validation fails.") + + transport = httpx.MockTransport(handler) + with ( + PdfRestClient(api_key=VALID_API_KEY, transport=transport) as client, + pytest.raises( + ValidationError, + match=r"less than or equal to 2400|greater than or equal to 12", + ), + ): + client.convert_to_gif( + make_pdf_file(PdfRestFileID.generate(1)), + resolution=invalid_resolution, + ) + + +@pytest.mark.asyncio +async def test_async_convert_to_gif_success( + monkeypatch: pytest.MonkeyPatch, +) -> None: + monkeypatch.delenv("PDFREST_API_KEY", raising=False) + input_file = make_pdf_file(PdfRestFileID.generate(1)) + output_id = "6c7d8e9f-3456-4cde-af01-cdefabcdef12" + + request_payload = GifPdfRestPayload.model_validate( + { + "files": [input_file], + "output_prefix": "async-output", + "page_range": "1-2", + "resolution": 400, + "color_model": "gray", + "smoothing": ["all"], + } + ).model_dump(mode="json", by_alias=True) + + seen: dict[str, int] = {"post": 0, "get": 0} + + def handler(request: httpx.Request) -> httpx.Response: + if request.method == "POST" and request.url.path == "/gif": + seen["post"] += 1 + payload = json.loads(request.content.decode("utf-8")) + assert_conversion_payload(payload, request_payload) + return httpx.Response( + 200, + json={"inputId": [input_file.id], "outputId": [output_id]}, + ) + if request.method == "GET" and request.url.path == f"/resource/{output_id}": + seen["get"] += 1 + assert request.url.params["format"] == "info" + return httpx.Response( + 200, + json=build_file_info_payload( + output_id, "async-output-001.gif", "image/gif" + ), + ) + msg = f"Unexpected request {request.method} {request.url}" + raise AssertionError(msg) + + transport = httpx.MockTransport(handler) + async with 
AsyncPdfRestClient(api_key=ASYNC_API_KEY, transport=transport) as client: + response = await client.convert_to_gif( + [input_file], + output_prefix="async-output", + page_range="1-2", + resolution=400, + color_model="gray", + smoothing=["all"], + ) + + assert seen == {"post": 1, "get": 1} + output_file = response.output_files[0] + assert output_file.name == "async-output-001.gif" + assert output_file.type == "image/gif" + + +def test_convert_to_gif_request_customization( + monkeypatch: pytest.MonkeyPatch, +) -> None: + monkeypatch.delenv("PDFREST_API_KEY", raising=False) + input_file = make_pdf_file(PdfRestFileID.generate(1)) + output_id = "7d8e9f00-4567-4ef0-90ab-abcdefabcdef" + captured_timeout: dict[str, float | dict[str, float] | None] = {} + + def handler(request: httpx.Request) -> httpx.Response: + if request.method == "POST" and request.url.path == "/gif": + assert request.url.params["trace"] == "true" + assert request.headers["X-Debug"] == "gif" + captured_timeout["value"] = request.extensions.get("timeout") + payload = json.loads(request.content.decode("utf-8")) + assert payload["debug"] is True + assert payload["resolution"] == 475 + assert payload["id"] == str(input_file.id) + return httpx.Response( + 200, + json={"inputId": [input_file.id], "outputId": [output_id]}, + ) + if request.method == "GET" and request.url.path == f"/resource/{output_id}": + assert request.url.params["trace"] == "true" + assert request.url.params["format"] == "info" + assert request.headers["X-Debug"] == "gif" + return httpx.Response( + 200, + json=build_file_info_payload(output_id, "custom-001.gif", "image/gif"), + ) + msg = f"Unexpected request {request.method} {request.url}" + raise AssertionError(msg) + + transport = httpx.MockTransport(handler) + with PdfRestClient(api_key=VALID_API_KEY, transport=transport) as client: + response = client.convert_to_gif( + input_file, + resolution=475, + extra_query={"trace": "true"}, + extra_headers={"X-Debug": "gif"}, + 
extra_body={"debug": True}, + timeout=0.35, + ) + + assert response.output_files[0].name == "custom-001.gif" + timeout_value = captured_timeout["value"] + assert timeout_value is not None + if isinstance(timeout_value, dict): + assert all( + component == pytest.approx(0.35) for component in timeout_value.values() + ) + else: + assert timeout_value == pytest.approx(0.35) + + +def test_convert_to_gif_validation_error(monkeypatch: pytest.MonkeyPatch) -> None: + monkeypatch.delenv("PDFREST_API_KEY", raising=False) + + def handler(_: httpx.Request) -> httpx.Response: + pytest.fail("Request should not be sent when validation fails.") + + transport = httpx.MockTransport(handler) + with ( + PdfRestClient(api_key=VALID_API_KEY, transport=transport) as client, + pytest.raises( + ValidationError, + match="less than or equal to 2400", + ), + ): + client.convert_to_gif( + make_pdf_file(PdfRestFileID.generate(1)), + resolution=9999, + ) + + +def test_convert_to_gif_invalid_smoothing_value( + monkeypatch: pytest.MonkeyPatch, +) -> None: + monkeypatch.delenv("PDFREST_API_KEY", raising=False) + + def handler(_: httpx.Request) -> httpx.Response: + pytest.fail("Request should not be sent when validation fails.") + + transport = httpx.MockTransport(handler) + with ( + PdfRestClient(api_key=VALID_API_KEY, transport=transport) as client, + pytest.raises( + ValidationError, + match=re.escape("Input should be 'none', 'all', 'text', 'line' or 'image'"), + ), + ): + client.convert_to_gif( + make_pdf_file(PdfRestFileID.generate(1)), + smoothing="invalid", # type: ignore[arg-type] + ) + + +def test_convert_to_gif_multiple_files_rejected( + monkeypatch: pytest.MonkeyPatch, +) -> None: + monkeypatch.delenv("PDFREST_API_KEY", raising=False) + + def handler(_: httpx.Request) -> httpx.Response: + pytest.fail("Request should not be sent when validation fails.") + + first = make_pdf_file(PdfRestFileID.generate(1)) + second = make_pdf_file(PdfRestFileID.generate(1)) + transport = 
httpx.MockTransport(handler) + with ( + PdfRestClient(api_key=VALID_API_KEY, transport=transport) as client, + pytest.raises( + ValidationError, + match=re.escape("List should have at most 1 item after validation"), + ), + ): + client.convert_to_gif([first, second]) + + +def test_convert_to_gif_empty_page_range_rejected( + monkeypatch: pytest.MonkeyPatch, +) -> None: + monkeypatch.delenv("PDFREST_API_KEY", raising=False) + + def handler(_: httpx.Request) -> httpx.Response: + pytest.fail("Request should not be sent when validation fails.") + + transport = httpx.MockTransport(handler) + with ( + PdfRestClient(api_key=VALID_API_KEY, transport=transport) as client, + pytest.raises( + ValidationError, + match=re.escape("List should have at least 1 item after validation"), + ), + ): + client.convert_to_gif( + make_pdf_file(PdfRestFileID.generate(1)), + page_range=[], + ) + + +def test_convert_to_gif_sequence_arguments(monkeypatch: pytest.MonkeyPatch) -> None: + monkeypatch.delenv("PDFREST_API_KEY", raising=False) + input_file = make_pdf_file(PdfRestFileID.generate(1)) + output_id = "8e9f0011-5678-4f01-a234-cdefabcdef55" + + request_payload = GifPdfRestPayload.model_validate( + { + "files": [input_file], + "page_range": "1, 3", + "smoothing": "text", + } + ).model_dump(mode="json", by_alias=True, exclude_none=True, exclude_defaults=True) + + seen: dict[str, int] = {"post": 0, "get": 0} + + def handler(request: httpx.Request) -> httpx.Response: + if request.method == "POST" and request.url.path == "/gif": + seen["post"] += 1 + payload = json.loads(request.content.decode("utf-8")) + assert_conversion_payload(payload, request_payload) + return httpx.Response( + 200, + json={"inputId": [input_file.id], "outputId": [output_id]}, + ) + if request.method == "GET" and request.url.path == f"/resource/{output_id}": + seen["get"] += 1 + return httpx.Response( + 200, + json=build_file_info_payload(output_id, "example-001.gif", "image/gif"), + ) + msg = f"Unexpected request 
{request.method} {request.url}" + raise AssertionError(msg) + + transport = httpx.MockTransport(handler) + with PdfRestClient(api_key=VALID_API_KEY, transport=transport) as client: + response = client.convert_to_gif( + [input_file], + page_range="1, 3", + smoothing="text", + ) + + assert seen == {"post": 1, "get": 1} + assert response.output_files[0].name == "example-001.gif" + + +@pytest.mark.asyncio +async def test_async_convert_to_gif_request_customization( + monkeypatch: pytest.MonkeyPatch, +) -> None: + monkeypatch.delenv("PDFREST_API_KEY", raising=False) + input_file = make_pdf_file(PdfRestFileID.generate(1)) + output_id = "9f001122-6789-4012-b345-cdefabcdef66" + captured_timeout: dict[str, float | dict[str, float] | None] = {} + + def handler(request: httpx.Request) -> httpx.Response: + if request.method == "POST" and request.url.path == "/gif": + assert request.url.params["trace"] == "true" + assert request.headers["X-Debug"] == "async-gif" + captured_timeout["value"] = request.extensions.get("timeout") + payload = json.loads(request.content.decode("utf-8")) + assert payload["debug"] is True + assert payload["resolution"] == 425 + assert payload["id"] == str(input_file.id) + return httpx.Response( + 200, + json={"inputId": [input_file.id], "outputId": [output_id]}, + ) + if request.method == "GET" and request.url.path == f"/resource/{output_id}": + assert request.url.params["trace"] == "true" + assert request.headers["X-Debug"] == "async-gif" + assert request.url.params["format"] == "info" + return httpx.Response( + 200, + json=build_file_info_payload( + output_id, "async-custom-001.gif", "image/gif" + ), + ) + msg = f"Unexpected request {request.method} {request.url}" + raise AssertionError(msg) + + transport = httpx.MockTransport(handler) + async with AsyncPdfRestClient(api_key=ASYNC_API_KEY, transport=transport) as client: + response = await client.convert_to_gif( + input_file, + resolution=425, + extra_query={"trace": "true"}, + 
extra_headers={"X-Debug": "async-gif"}, + extra_body={"debug": True}, + timeout=0.48, + ) + + assert response.output_files[0].name == "async-custom-001.gif" + timeout_value = captured_timeout["value"] + assert timeout_value is not None + if isinstance(timeout_value, dict): + assert all( + component == pytest.approx(0.48) for component in timeout_value.values() + ) + else: + assert timeout_value == pytest.approx(0.48) + + +def test_live_convert_to_gif( + pdfrest_api_key: str, + pdfrest_live_base_url: str, +) -> None: + resource = get_test_resource_path("report.pdf") + with PdfRestClient( + api_key=pdfrest_api_key, base_url=pdfrest_live_base_url + ) as client: + uploaded = client.files.create_from_paths([resource]) + response = client.convert_to_gif( + uploaded[0], + output_prefix="live-gif", + page_range="1", + ) + assert isinstance(response, PdfRestFileBasedResponse) + assert response.output_files + + +@pytest.mark.asyncio +async def test_live_async_convert_to_gif( + pdfrest_api_key: str, + pdfrest_live_base_url: str, +) -> None: + resource = get_test_resource_path("report.pdf") + async with AsyncPdfRestClient( + api_key=pdfrest_api_key, base_url=pdfrest_live_base_url + ) as client: + uploaded = await client.files.create_from_paths([resource]) + response = await client.convert_to_gif( + uploaded[0], + output_prefix="live-async-gif", + page_range="1", + ) + assert isinstance(response, PdfRestFileBasedResponse) + assert response.output_files diff --git a/tests/test_convert_to_jpeg.py b/tests/test_convert_to_jpeg.py new file mode 100644 index 00000000..46e5f648 --- /dev/null +++ b/tests/test_convert_to_jpeg.py @@ -0,0 +1,590 @@ +from __future__ import annotations + +import json +import re + +import httpx +import pytest +from pydantic import ValidationError + +from pdfrest import AsyncPdfRestClient, PdfRestClient +from pdfrest.models import PdfRestFileBasedResponse, PdfRestFileID +from pdfrest.models._internal import JpegPdfRestPayload + +from .graphics_test_helpers 
import ( + ASYNC_API_KEY, + VALID_API_KEY, + assert_conversion_payload, + build_file_info_payload, + make_pdf_file, +) +from .resources import get_test_resource_path + + +@pytest.mark.parametrize("color_model", ["rgb", "cmyk", "gray"]) +def test_convert_to_jpeg_success( + monkeypatch: pytest.MonkeyPatch, color_model: str +) -> None: + monkeypatch.delenv("PDFREST_API_KEY", raising=False) + input_file = make_pdf_file(PdfRestFileID.generate(1)) + output_id = str(PdfRestFileID.generate()) + expected_name = f"converted-{color_model}-001.jpg" + + request_payload = JpegPdfRestPayload.model_validate( + { + "files": [input_file], + "output_prefix": "converted", + "page_range": ["1", "2-3"], + "resolution": 450, + "color_model": color_model, + "jpeg_quality": 90, + "smoothing": ["text", "image"], + } + ).model_dump(mode="json", by_alias=True) + + seen: dict[str, int] = {"post": 0, "get": 0} + + def handler(request: httpx.Request) -> httpx.Response: + if request.method == "POST" and request.url.path == "/jpg": + seen["post"] += 1 + payload = json.loads(request.content.decode("utf-8")) + assert_conversion_payload( + payload, request_payload, allowed_extras={"jpeg_quality"} + ) + return httpx.Response( + 200, + json={"inputId": [input_file.id], "outputId": [output_id]}, + ) + if request.method == "GET" and request.url.path == f"/resource/{output_id}": + seen["get"] += 1 + return httpx.Response( + 200, + json=build_file_info_payload(output_id, expected_name, "image/jpeg"), + ) + msg = f"Unexpected request {request.method} {request.url}" + raise AssertionError(msg) + + transport = httpx.MockTransport(handler) + with PdfRestClient(api_key=VALID_API_KEY, transport=transport) as client: + response = client.convert_to_jpeg( + input_file, + output_prefix="converted", + page_range=["1", "2-3"], + resolution=450, + color_model=color_model, # type: ignore[arg-type] + smoothing=["text", "image"], + jpeg_quality=90, + ) + + assert seen == {"post": 1, "get": 1} + output_file = 
response.output_files[0] + assert output_file.name == expected_name + assert output_file.type == "image/jpeg" + assert str(output_file.url).endswith(output_id) + + +def test_convert_to_jpeg_defaults_excluded(monkeypatch: pytest.MonkeyPatch) -> None: + monkeypatch.delenv("PDFREST_API_KEY", raising=False) + input_file = make_pdf_file(PdfRestFileID.generate(1)) + output_id = "8e9f0011-2222-4bcd-9f00-abcdefabcdef" + + request_payload = JpegPdfRestPayload.model_validate( + {"files": input_file} + ).model_dump(mode="json", by_alias=True, exclude_none=True, exclude_defaults=True) + + def handler(request: httpx.Request) -> httpx.Response: + if request.method == "POST" and request.url.path == "/jpg": + payload = json.loads(request.content.decode("utf-8")) + assert_conversion_payload( + payload, request_payload, allowed_extras={"jpeg_quality"} + ) + assert "jpeg_quality" not in payload + return httpx.Response( + 200, + json={"inputId": [input_file.id], "outputId": [output_id]}, + ) + if request.method == "GET" and request.url.path == f"/resource/{output_id}": + return httpx.Response( + 200, + json=build_file_info_payload( + output_id, "example-001.jpg", "image/jpeg" + ), + ) + msg = f"Unexpected request {request.method} {request.url}" + raise AssertionError(msg) + + transport = httpx.MockTransport(handler) + with PdfRestClient(api_key=VALID_API_KEY, transport=transport) as client: + response = client.convert_to_jpeg(input_file) + + output_file = response.output_files[0] + assert output_file.name == "example-001.jpg" + assert output_file.type == "image/jpeg" + + +@pytest.mark.parametrize("resolution", [12, 2400]) +def test_convert_to_jpeg_resolution_limits( + monkeypatch: pytest.MonkeyPatch, resolution: int +) -> None: + monkeypatch.delenv("PDFREST_API_KEY", raising=False) + input_file = make_pdf_file(PdfRestFileID.generate(1)) + output_id = str(PdfRestFileID.generate()) + + request_payload = JpegPdfRestPayload.model_validate( + { + "files": [input_file], + "resolution": 
resolution, + } + ).model_dump(mode="json", by_alias=True, exclude_none=True, exclude_defaults=True) + + seen: dict[str, int] = {"post": 0, "get": 0} + + def handler(request: httpx.Request) -> httpx.Response: + if request.method == "POST" and request.url.path == "/jpg": + seen["post"] += 1 + payload = json.loads(request.content.decode("utf-8")) + assert_conversion_payload( + payload, request_payload, allowed_extras={"jpeg_quality"} + ) + return httpx.Response( + 200, + json={"inputId": [input_file.id], "outputId": [output_id]}, + ) + if request.method == "GET" and request.url.path == f"/resource/{output_id}": + seen["get"] += 1 + return httpx.Response( + 200, + json=build_file_info_payload( + output_id, f"example-resolution-{resolution}.jpg", "image/jpeg" + ), + ) + msg = f"Unexpected request {request.method} {request.url}" + raise AssertionError(msg) + + transport = httpx.MockTransport(handler) + with PdfRestClient(api_key=VALID_API_KEY, transport=transport) as client: + response = client.convert_to_jpeg( + input_file, + resolution=resolution, + ) + + assert seen == {"post": 1, "get": 1} + assert response.output_files[0].name == f"example-resolution-{resolution}.jpg" + + +@pytest.mark.parametrize("invalid_resolution", [11, 2401]) +def test_convert_to_jpeg_resolution_out_of_bounds( + monkeypatch: pytest.MonkeyPatch, invalid_resolution: int +) -> None: + monkeypatch.delenv("PDFREST_API_KEY", raising=False) + + def handler(_: httpx.Request) -> httpx.Response: + pytest.fail("Request should not be sent when validation fails.") + + transport = httpx.MockTransport(handler) + with ( + PdfRestClient(api_key=VALID_API_KEY, transport=transport) as client, + pytest.raises( + ValidationError, + match=r"less than or equal to 2400|greater than or equal to 12", + ), + ): + client.convert_to_jpeg( + make_pdf_file(PdfRestFileID.generate(1)), + resolution=invalid_resolution, + ) + + +@pytest.mark.parametrize( + "invalid_color", + [pytest.param("rgba", id="rgba"), pytest.param("lab", 
id="lab")], +) +def test_convert_to_jpeg_invalid_color_model( + monkeypatch: pytest.MonkeyPatch, invalid_color: str +) -> None: + monkeypatch.delenv("PDFREST_API_KEY", raising=False) + + def handler(_: httpx.Request) -> httpx.Response: + pytest.fail("Request should not be sent when validation fails.") + + transport = httpx.MockTransport(handler) + with ( + PdfRestClient(api_key=VALID_API_KEY, transport=transport) as client, + pytest.raises( + ValidationError, + match=re.escape("Input should be 'rgb', 'cmyk' or 'gray'"), + ), + ): + client.convert_to_jpeg( + make_pdf_file(PdfRestFileID.generate(1)), + color_model=invalid_color, # type: ignore[arg-type] + ) + + +def test_convert_to_jpeg_invalid_quality(monkeypatch: pytest.MonkeyPatch) -> None: + monkeypatch.delenv("PDFREST_API_KEY", raising=False) + + def handler(_: httpx.Request) -> httpx.Response: + pytest.fail("Request should not be sent when validation fails.") + + transport = httpx.MockTransport(handler) + with ( + PdfRestClient(api_key=VALID_API_KEY, transport=transport) as client, + pytest.raises( + ValidationError, + match=re.escape("Input should be greater than or equal to 1"), + ), + ): + client.convert_to_jpeg( + make_pdf_file(PdfRestFileID.generate(1)), + jpeg_quality=0, + ) + + +@pytest.mark.asyncio +async def test_async_convert_to_jpeg_success( + monkeypatch: pytest.MonkeyPatch, +) -> None: + monkeypatch.delenv("PDFREST_API_KEY", raising=False) + input_file = make_pdf_file(PdfRestFileID.generate(1)) + output_id = "9f001122-3333-4cde-af01-cdefabcdef12" + + request_payload = JpegPdfRestPayload.model_validate( + { + "files": [input_file], + "output_prefix": "async-output", + "page_range": "1-2", + "resolution": 500, + "color_model": "gray", + "jpeg_quality": 85, + "smoothing": ["all"], + } + ).model_dump(mode="json", by_alias=True) + + seen: dict[str, int] = {"post": 0, "get": 0} + + def handler(request: httpx.Request) -> httpx.Response: + if request.method == "POST" and request.url.path == "/jpg": + 
seen["post"] += 1 + payload = json.loads(request.content.decode("utf-8")) + assert_conversion_payload( + payload, request_payload, allowed_extras={"jpeg_quality"} + ) + return httpx.Response( + 200, + json={"inputId": [input_file.id], "outputId": [output_id]}, + ) + if request.method == "GET" and request.url.path == f"/resource/{output_id}": + seen["get"] += 1 + assert request.url.params["format"] == "info" + return httpx.Response( + 200, + json=build_file_info_payload( + output_id, "async-output-001.jpg", "image/jpeg" + ), + ) + msg = f"Unexpected request {request.method} {request.url}" + raise AssertionError(msg) + + transport = httpx.MockTransport(handler) + async with AsyncPdfRestClient(api_key=ASYNC_API_KEY, transport=transport) as client: + response = await client.convert_to_jpeg( + [input_file], + output_prefix="async-output", + page_range="1-2", + resolution=500, + color_model="gray", + smoothing=["all"], + jpeg_quality=85, + ) + + assert seen == {"post": 1, "get": 1} + output_file = response.output_files[0] + assert output_file.name == "async-output-001.jpg" + assert output_file.type == "image/jpeg" + + +def test_convert_to_jpeg_request_customization( + monkeypatch: pytest.MonkeyPatch, +) -> None: + monkeypatch.delenv("PDFREST_API_KEY", raising=False) + input_file = make_pdf_file(PdfRestFileID.generate(1)) + output_id = "abcdef01-4444-4def-9012-bbbbbbbbbbbb" + captured_timeout: dict[str, float | dict[str, float] | None] = {} + + def handler(request: httpx.Request) -> httpx.Response: + if request.method == "POST" and request.url.path == "/jpg": + assert request.url.params["trace"] == "true" + assert request.headers["X-Debug"] == "jpeg" + captured_timeout["value"] = request.extensions.get("timeout") + payload = json.loads(request.content.decode("utf-8")) + assert payload["debug"] is True + assert payload["resolution"] == 475 + assert payload["jpeg_quality"] == 82 + assert payload["id"] == str(input_file.id) + return httpx.Response( + 200, + json={"inputId": 
[input_file.id], "outputId": [output_id]}, + ) + if request.method == "GET" and request.url.path == f"/resource/{output_id}": + assert request.url.params["trace"] == "true" + assert request.url.params["format"] == "info" + assert request.headers["X-Debug"] == "jpeg" + return httpx.Response( + 200, + json=build_file_info_payload(output_id, "custom-001.jpg", "image/jpeg"), + ) + msg = f"Unexpected request {request.method} {request.url}" + raise AssertionError(msg) + + transport = httpx.MockTransport(handler) + with PdfRestClient(api_key=VALID_API_KEY, transport=transport) as client: + response = client.convert_to_jpeg( + input_file, + resolution=475, + color_model="rgb", + jpeg_quality=82, + extra_query={"trace": "true"}, + extra_headers={"X-Debug": "jpeg"}, + extra_body={"debug": True}, + timeout=0.42, + ) + + assert response.output_files[0].name == "custom-001.jpg" + timeout_value = captured_timeout["value"] + assert timeout_value is not None + if isinstance(timeout_value, dict): + assert all( + component == pytest.approx(0.42) for component in timeout_value.values() + ) + else: + assert timeout_value == pytest.approx(0.42) + + +def test_convert_to_jpeg_validation_error(monkeypatch: pytest.MonkeyPatch) -> None: + monkeypatch.delenv("PDFREST_API_KEY", raising=False) + + def handler(_: httpx.Request) -> httpx.Response: + pytest.fail("Request should not be sent when validation fails.") + + transport = httpx.MockTransport(handler) + with ( + PdfRestClient(api_key=VALID_API_KEY, transport=transport) as client, + pytest.raises( + ValidationError, + match="less than or equal to 2400", + ), + ): + client.convert_to_jpeg( + make_pdf_file(PdfRestFileID.generate(1)), + resolution=5000, + ) + + +def test_convert_to_jpeg_invalid_smoothing_value( + monkeypatch: pytest.MonkeyPatch, +) -> None: + monkeypatch.delenv("PDFREST_API_KEY", raising=False) + + def handler(_: httpx.Request) -> httpx.Response: + pytest.fail("Request should not be sent when validation fails.") + + transport 
= httpx.MockTransport(handler) + with ( + PdfRestClient(api_key=VALID_API_KEY, transport=transport) as client, + pytest.raises( + ValidationError, + match=re.escape("Input should be 'none', 'all', 'text', 'line' or 'image'"), + ), + ): + client.convert_to_jpeg( + make_pdf_file(PdfRestFileID.generate(1)), + smoothing="invalid", # type: ignore[arg-type] + ) + + +def test_convert_to_jpeg_multiple_files_rejected( + monkeypatch: pytest.MonkeyPatch, +) -> None: + monkeypatch.delenv("PDFREST_API_KEY", raising=False) + + def handler(_: httpx.Request) -> httpx.Response: + pytest.fail("Request should not be sent when validation fails.") + + first = make_pdf_file(PdfRestFileID.generate(1)) + second = make_pdf_file(PdfRestFileID.generate(1)) + transport = httpx.MockTransport(handler) + with ( + PdfRestClient(api_key=VALID_API_KEY, transport=transport) as client, + pytest.raises( + ValidationError, + match=re.escape("List should have at most 1 item after validation"), + ), + ): + client.convert_to_jpeg([first, second]) + + +def test_convert_to_jpeg_empty_page_range_rejected( + monkeypatch: pytest.MonkeyPatch, +) -> None: + monkeypatch.delenv("PDFREST_API_KEY", raising=False) + + def handler(_: httpx.Request) -> httpx.Response: + pytest.fail("Request should not be sent when validation fails.") + + transport = httpx.MockTransport(handler) + with ( + PdfRestClient(api_key=VALID_API_KEY, transport=transport) as client, + pytest.raises( + ValidationError, + match=re.escape("List should have at least 1 item after validation"), + ), + ): + client.convert_to_jpeg( + make_pdf_file(PdfRestFileID.generate(1)), + page_range=[], + ) + + +def test_convert_to_jpeg_sequence_arguments(monkeypatch: pytest.MonkeyPatch) -> None: + monkeypatch.delenv("PDFREST_API_KEY", raising=False) + input_file = make_pdf_file(PdfRestFileID.generate(1)) + output_id = "cdef0123-5555-4ab0-9123-dededededede" + + request_payload = JpegPdfRestPayload.model_validate( + { + "files": [input_file], + "page_range": "1, 3", 
+ "smoothing": "text", + } + ).model_dump(mode="json", by_alias=True, exclude_none=True, exclude_defaults=True) + + seen: dict[str, int] = {"post": 0, "get": 0} + + def handler(request: httpx.Request) -> httpx.Response: + if request.method == "POST" and request.url.path == "/jpg": + seen["post"] += 1 + payload = json.loads(request.content.decode("utf-8")) + assert_conversion_payload(payload, request_payload, allowed_extras=set()) + return httpx.Response( + 200, + json={"inputId": [input_file.id], "outputId": [output_id]}, + ) + if request.method == "GET" and request.url.path == f"/resource/{output_id}": + seen["get"] += 1 + return httpx.Response( + 200, + json=build_file_info_payload( + output_id, "example-001.jpg", "image/jpeg" + ), + ) + msg = f"Unexpected request {request.method} {request.url}" + raise AssertionError(msg) + + transport = httpx.MockTransport(handler) + with PdfRestClient(api_key=VALID_API_KEY, transport=transport) as client: + response = client.convert_to_jpeg( + [input_file], + page_range="1, 3", + smoothing="text", + ) + + assert seen == {"post": 1, "get": 1} + assert response.output_files[0].name == "example-001.jpg" + + +@pytest.mark.asyncio +async def test_async_convert_to_jpeg_request_customization( + monkeypatch: pytest.MonkeyPatch, +) -> None: + monkeypatch.delenv("PDFREST_API_KEY", raising=False) + input_file = make_pdf_file(PdfRestFileID.generate(1)) + output_id = "def01234-6666-4bc1-9234-eeeeeeeeeeee" + captured_timeout: dict[str, float | dict[str, float] | None] = {} + + def handler(request: httpx.Request) -> httpx.Response: + if request.method == "POST" and request.url.path == "/jpg": + assert request.url.params["trace"] == "true" + assert request.headers["X-Debug"] == "async-jpeg" + captured_timeout["value"] = request.extensions.get("timeout") + payload = json.loads(request.content.decode("utf-8")) + assert payload["debug"] is True + assert payload["resolution"] == 440 + assert payload["jpeg_quality"] == 88 + assert payload["id"] == 
str(input_file.id) + return httpx.Response( + 200, + json={"inputId": [input_file.id], "outputId": [output_id]}, + ) + if request.method == "GET" and request.url.path == f"/resource/{output_id}": + assert request.url.params["trace"] == "true" + assert request.headers["X-Debug"] == "async-jpeg" + assert request.url.params["format"] == "info" + return httpx.Response( + 200, + json=build_file_info_payload( + output_id, "async-custom-001.jpg", "image/jpeg" + ), + ) + msg = f"Unexpected request {request.method} {request.url}" + raise AssertionError(msg) + + transport = httpx.MockTransport(handler) + async with AsyncPdfRestClient(api_key=ASYNC_API_KEY, transport=transport) as client: + response = await client.convert_to_jpeg( + input_file, + resolution=440, + color_model="gray", + jpeg_quality=88, + extra_query={"trace": "true"}, + extra_headers={"X-Debug": "async-jpeg"}, + extra_body={"debug": True}, + timeout=0.51, + ) + + assert response.output_files[0].name == "async-custom-001.jpg" + timeout_value = captured_timeout["value"] + assert timeout_value is not None + if isinstance(timeout_value, dict): + assert all( + component == pytest.approx(0.51) for component in timeout_value.values() + ) + else: + assert timeout_value == pytest.approx(0.51) + + +def test_live_convert_to_jpeg( + pdfrest_api_key: str, + pdfrest_live_base_url: str, +) -> None: + resource = get_test_resource_path("report.pdf") + with PdfRestClient( + api_key=pdfrest_api_key, base_url=pdfrest_live_base_url + ) as client: + uploaded = client.files.create_from_paths([resource]) + response = client.convert_to_jpeg( + uploaded[0], + output_prefix="live-jpeg", + page_range="1", + ) + assert isinstance(response, PdfRestFileBasedResponse) + assert response.output_files + + +@pytest.mark.asyncio +async def test_live_async_convert_to_jpeg( + pdfrest_api_key: str, + pdfrest_live_base_url: str, +) -> None: + resource = get_test_resource_path("report.pdf") + async with AsyncPdfRestClient( + api_key=pdfrest_api_key, 
base_url=pdfrest_live_base_url + ) as client: + uploaded = await client.files.create_from_paths([resource]) + response = await client.convert_to_jpeg( + uploaded[0], + output_prefix="live-async-jpeg", + page_range="1", + ) + assert isinstance(response, PdfRestFileBasedResponse) + assert response.output_files diff --git a/tests/test_convert_to_png.py b/tests/test_convert_to_png.py new file mode 100644 index 00000000..4ac8b1f7 --- /dev/null +++ b/tests/test_convert_to_png.py @@ -0,0 +1,613 @@ +from __future__ import annotations + +import json +import re + +import httpx +import pytest +from pydantic import ValidationError + +from pdfrest import AsyncPdfRestClient, PdfRestClient +from pdfrest.models import PdfRestFileBasedResponse, PdfRestFileID +from pdfrest.models._internal import PngPdfRestPayload + +from .graphics_test_helpers import ( + ASYNC_API_KEY, + VALID_API_KEY, + assert_conversion_payload, + build_file_info_payload, + make_pdf_file, +) +from .resources import get_test_resource_path + + +@pytest.mark.parametrize("color_model", ["rgb", "rgba", "gray"]) +def test_convert_to_png_success( + monkeypatch: pytest.MonkeyPatch, color_model: str +) -> None: + monkeypatch.delenv("PDFREST_API_KEY", raising=False) + input_file = make_pdf_file(PdfRestFileID.generate(1)) + output_id = str(PdfRestFileID.generate()) + expected_name = f"converted-{color_model}-001.png" + + request_payload = PngPdfRestPayload.model_validate( + { + "files": [input_file], + "output_prefix": "converted", + "page_range": ["1", "2-3"], + "resolution": 600, + "color_model": color_model, + "smoothing": ["text", "image"], + } + ).model_dump(mode="json", by_alias=True) + + seen: dict[str, int] = {"post": 0, "get": 0} + + def handler(request: httpx.Request) -> httpx.Response: + if request.method == "POST" and request.url.path == "/png": + seen["post"] += 1 + assert request.headers["wsn"] == "pdfrest-python" + payload = json.loads(request.content.decode("utf-8")) + assert_conversion_payload(payload, 
request_payload) + return httpx.Response( + 200, + json={ + "inputId": [input_file.id], + "outputId": [output_id], + }, + ) + if request.method == "GET" and request.url.path == f"/resource/{output_id}": + seen["get"] += 1 + assert request.url.params["format"] == "info" + return httpx.Response( + 200, + json=build_file_info_payload(output_id, expected_name, "image/png"), + ) + msg = f"Unexpected request {request.method} {request.url}" + raise AssertionError(msg) + + transport = httpx.MockTransport(handler) + with PdfRestClient(api_key=VALID_API_KEY, transport=transport) as client: + response = client.convert_to_png( + input_file, + output_prefix="converted", + page_range=["1", "2-3"], + resolution=600, + color_model=color_model, # type: ignore[arg-type] + smoothing=["text", "image"], + ) + + assert seen == {"post": 1, "get": 1} + assert isinstance(response, PdfRestFileBasedResponse) + assert len(response.output_files) == 1 + output_file = response.output_files[0] + assert output_file.name == expected_name + assert output_file.type == "image/png" + assert output_file.size == 256 + assert str(output_file.url).endswith(output_id) + assert str(response.input_id) == str(input_file.id) + assert response.warning is None + + +def test_convert_to_png_request_customization( + monkeypatch: pytest.MonkeyPatch, +) -> None: + monkeypatch.delenv("PDFREST_API_KEY", raising=False) + input_file = make_pdf_file(PdfRestFileID.generate(1)) + output_id = "9f4a9b10-3c55-4e6d-a111-1234567890ab" + captured_timeout: dict[str, float | dict[str, float] | None] = {} + + def handler(request: httpx.Request) -> httpx.Response: + if request.method == "POST" and request.url.path == "/png": + assert request.url.params["trace"] == "true" + assert request.headers["X-Debug"] == "1" + captured_timeout["value"] = request.extensions.get("timeout") + payload = json.loads(request.content.decode("utf-8")) + assert payload["debug"] is True + assert payload["resolution"] == 450 + assert payload["id"] == 
str(input_file.id) + return httpx.Response( + 200, + json={ + "inputId": [input_file.id], + "outputId": [output_id], + }, + ) + if request.method == "GET" and request.url.path == f"/resource/{output_id}": + assert request.url.params["format"] == "info" + assert request.url.params["trace"] == "true" + assert request.headers["X-Debug"] == "1" + return httpx.Response( + 200, + json=build_file_info_payload(output_id, "custom-001.png", "image/png"), + ) + msg = f"Unexpected request {request.method} {request.url}" + raise AssertionError(msg) + + transport = httpx.MockTransport(handler) + with PdfRestClient(api_key=VALID_API_KEY, transport=transport) as client: + response = client.convert_to_png( + input_file, + resolution=450, + extra_query={"trace": "true"}, + extra_headers={"X-Debug": "1"}, + extra_body={"debug": True}, + timeout=0.25, + ) + + assert isinstance(response, PdfRestFileBasedResponse) + assert response.output_files[0].name == "custom-001.png" + timeout_value = captured_timeout["value"] + assert timeout_value is not None + if isinstance(timeout_value, dict): + assert all( + component == pytest.approx(0.25) for component in timeout_value.values() + ) + else: + assert timeout_value == pytest.approx(0.25) + + +@pytest.mark.parametrize("resolution", [12, 2400]) +def test_convert_to_png_resolution_limits( + monkeypatch: pytest.MonkeyPatch, resolution: int +) -> None: + monkeypatch.delenv("PDFREST_API_KEY", raising=False) + input_file = make_pdf_file(PdfRestFileID.generate(1)) + output_id = str(PdfRestFileID.generate()) + + request_payload = PngPdfRestPayload.model_validate( + { + "files": [input_file], + "resolution": resolution, + } + ).model_dump(mode="json", by_alias=True, exclude_none=True, exclude_defaults=True) + + seen: dict[str, int] = {"post": 0, "get": 0} + + def handler(request: httpx.Request) -> httpx.Response: + if request.method == "POST" and request.url.path == "/png": + seen["post"] += 1 + payload = json.loads(request.content.decode("utf-8")) + 
assert_conversion_payload(payload, request_payload) + return httpx.Response( + 200, + json={ + "inputId": [input_file.id], + "outputId": [output_id], + }, + ) + if request.method == "GET" and request.url.path == f"/resource/{output_id}": + seen["get"] += 1 + return httpx.Response( + 200, + json=build_file_info_payload( + output_id, f"example-resolution-{resolution}.png", "image/png" + ), + ) + msg = f"Unexpected request {request.method} {request.url}" + raise AssertionError(msg) + + transport = httpx.MockTransport(handler) + with PdfRestClient(api_key=VALID_API_KEY, transport=transport) as client: + response = client.convert_to_png( + input_file, + resolution=resolution, + ) + + assert seen == {"post": 1, "get": 1} + assert response.output_files[0].name == f"example-resolution-{resolution}.png" + + +@pytest.mark.parametrize( + "invalid_resolution", + [11, 2401], +) +def test_convert_to_png_resolution_out_of_bounds( + monkeypatch: pytest.MonkeyPatch, invalid_resolution: int +) -> None: + monkeypatch.delenv("PDFREST_API_KEY", raising=False) + + def handler(_: httpx.Request) -> httpx.Response: + pytest.fail("Request should not be sent when validation fails.") + + transport = httpx.MockTransport(handler) + with ( + PdfRestClient(api_key=VALID_API_KEY, transport=transport) as client, + pytest.raises( + ValidationError, + match=r"less than or equal to 2400|greater than or equal to 12", + ), + ): + client.convert_to_png( + make_pdf_file(PdfRestFileID.generate(1)), + resolution=invalid_resolution, + ) + + +def test_convert_to_png_validation_error(monkeypatch: pytest.MonkeyPatch) -> None: + monkeypatch.delenv("PDFREST_API_KEY", raising=False) + + def handler(_: httpx.Request) -> httpx.Response: + pytest.fail("Request should not be sent when validation fails.") + + transport = httpx.MockTransport(handler) + with ( + PdfRestClient(api_key=VALID_API_KEY, transport=transport) as client, + pytest.raises(ValidationError, match="greater than or equal to 12"), + ): + 
client.convert_to_png( + make_pdf_file(PdfRestFileID.generate(1)), + resolution=5, + ) + + +@pytest.mark.asyncio +async def test_async_convert_to_png_success(monkeypatch: pytest.MonkeyPatch) -> None: + monkeypatch.delenv("PDFREST_API_KEY", raising=False) + input_file = make_pdf_file(PdfRestFileID.generate(1)) + output_id = "2c134412-aaaa-4bbb-8ccc-dddddddddddd" + + request_payload = PngPdfRestPayload.model_validate( + { + "files": [input_file], + "output_prefix": "async-output", + "page_range": "1-2", + "resolution": 450, + "color_model": "rgb", + "smoothing": ["all"], + } + ).model_dump(mode="json", by_alias=True) + + seen: dict[str, int] = {"post": 0, "get": 0} + + def handler(request: httpx.Request) -> httpx.Response: + if request.method == "POST" and request.url.path == "/png": + seen["post"] += 1 + payload = json.loads(request.content.decode("utf-8")) + assert_conversion_payload(payload, request_payload) + return httpx.Response( + 200, + json={ + "inputId": [input_file.id], + "outputId": [output_id], + }, + ) + if request.method == "GET" and request.url.path == f"/resource/{output_id}": + seen["get"] += 1 + assert request.url.params["format"] == "info" + return httpx.Response( + 200, + json=build_file_info_payload( + output_id, "async-output-001.png", "image/png" + ), + ) + msg = f"Unexpected request {request.method} {request.url}" + raise AssertionError(msg) + + transport = httpx.MockTransport(handler) + async with AsyncPdfRestClient(api_key=ASYNC_API_KEY, transport=transport) as client: + response = await client.convert_to_png( + [input_file], + output_prefix="async-output", + page_range="1-2", + resolution=450, + color_model="rgb", + smoothing=["all"], + ) + + assert seen == {"post": 1, "get": 1} + assert isinstance(response, PdfRestFileBasedResponse) + output_file = response.output_files[0] + assert output_file.name == "async-output-001.png" + assert output_file.name.startswith("async-output") + assert output_file.type == "image/png" + assert 
output_file.size == 256 + assert str(output_file.url).endswith(output_id) + assert str(response.input_id) == str(input_file.id) + assert response.warning is None + + +@pytest.mark.asyncio +async def test_async_convert_to_png_request_customization( + monkeypatch: pytest.MonkeyPatch, +) -> None: + monkeypatch.delenv("PDFREST_API_KEY", raising=False) + input_file = make_pdf_file(PdfRestFileID.generate(1)) + output_id = "abcdb5f9-1234-4c67-98ef-abcdefabcdef" + captured_timeout: dict[str, float | dict[str, float] | None] = {} + + def handler(request: httpx.Request) -> httpx.Response: + if request.method == "POST" and request.url.path == "/png": + assert request.url.params["trace"] == "true" + assert request.headers["X-Debug"] == "async" + captured_timeout["value"] = request.extensions.get("timeout") + payload = json.loads(request.content.decode("utf-8")) + assert payload["debug"] is True + assert payload["resolution"] == 500 + assert payload["id"] == str(input_file.id) + return httpx.Response( + 200, + json={ + "inputId": [input_file.id], + "outputId": [output_id], + }, + ) + if request.method == "GET" and request.url.path == f"/resource/{output_id}": + assert request.url.params["format"] == "info" + assert request.url.params["trace"] == "true" + assert request.headers["X-Debug"] == "async" + return httpx.Response( + 200, + json=build_file_info_payload( + output_id, "async-custom-001.png", "image/png" + ), + ) + msg = f"Unexpected request {request.method} {request.url}" + raise AssertionError(msg) + + transport = httpx.MockTransport(handler) + async with AsyncPdfRestClient(api_key=ASYNC_API_KEY, transport=transport) as client: + response = await client.convert_to_png( + input_file, + resolution=500, + extra_query={"trace": "true"}, + extra_headers={"X-Debug": "async"}, + extra_body={"debug": True}, + timeout=0.6, + ) + + assert isinstance(response, PdfRestFileBasedResponse) + assert response.output_files[0].name == "async-custom-001.png" + timeout_value = 
captured_timeout["value"] + assert timeout_value is not None + if isinstance(timeout_value, dict): + assert all( + component == pytest.approx(0.6) for component in timeout_value.values() + ) + else: + assert timeout_value == pytest.approx(0.6) + + +@pytest.mark.asyncio +async def test_async_convert_to_png_validation_error( + monkeypatch: pytest.MonkeyPatch, +) -> None: + monkeypatch.delenv("PDFREST_API_KEY", raising=False) + + def handler(_: httpx.Request) -> httpx.Response: + pytest.fail("Request should not be sent when validation fails.") + + transport = httpx.MockTransport(handler) + async with AsyncPdfRestClient(api_key=ASYNC_API_KEY, transport=transport) as client: + with pytest.raises(ValidationError, match="less than or equal to 2400"): + await client.convert_to_png( + make_pdf_file(PdfRestFileID.generate(1)), + resolution=9000, + ) + + +def test_convert_to_png_sequence_arguments(monkeypatch: pytest.MonkeyPatch) -> None: + monkeypatch.delenv("PDFREST_API_KEY", raising=False) + input_file = make_pdf_file(PdfRestFileID.generate(1)) + output_id = "1f9c6d0a-5ec4-4f6c-b1f2-bbbbbbbbbbbb" + + request_payload = PngPdfRestPayload.model_validate( + { + "files": [input_file], + "page_range": "1, 3", + "smoothing": "text", + } + ).model_dump(mode="json", by_alias=True, exclude_none=True, exclude_defaults=True) + + seen: dict[str, int] = {"post": 0, "get": 0} + + def handler(request: httpx.Request) -> httpx.Response: + if request.method == "POST" and request.url.path == "/png": + seen["post"] += 1 + payload = json.loads(request.content.decode("utf-8")) + assert_conversion_payload(payload, request_payload) + return httpx.Response( + 200, + json={ + "inputId": [input_file.id], + "outputId": [output_id], + }, + ) + if request.method == "GET" and request.url.path == f"/resource/{output_id}": + seen["get"] += 1 + return httpx.Response( + 200, + json=build_file_info_payload(output_id, "example-001.png", "image/png"), + ) + msg = f"Unexpected request {request.method} 
{request.url}" + raise AssertionError(msg) + + transport = httpx.MockTransport(handler) + with PdfRestClient(api_key=VALID_API_KEY, transport=transport) as client: + response = client.convert_to_png( + [input_file], + page_range="1, 3", + smoothing="text", + ) + + assert seen == {"post": 1, "get": 1} + output_file = response.output_files[0] + assert output_file.name == "example-001.png" + assert output_file.name.startswith("example") + assert output_file.type == "image/png" + assert output_file.size == 256 + assert str(output_file.url).endswith(output_id) + assert response.warning is None + + +def test_convert_to_png_defaults_excluded(monkeypatch: pytest.MonkeyPatch) -> None: + monkeypatch.delenv("PDFREST_API_KEY", raising=False) + input_file = make_pdf_file(PdfRestFileID.generate(1)) + output_id = "2ab0c1d2-3e4f-4a5b-8c9d-dddddddddddd" + + request_payload = PngPdfRestPayload.model_validate( + { + "files": input_file, + } + ).model_dump(mode="json", by_alias=True, exclude_none=True, exclude_defaults=True) + + seen: dict[str, int] = {"post": 0, "get": 0} + + def handler(request: httpx.Request) -> httpx.Response: + if request.method == "POST" and request.url.path == "/png": + seen["post"] += 1 + payload = json.loads(request.content.decode("utf-8")) + assert_conversion_payload(payload, request_payload) + return httpx.Response( + 200, + json={ + "inputId": [input_file.id], + "outputId": [output_id], + }, + ) + if request.method == "GET" and request.url.path == f"/resource/{output_id}": + seen["get"] += 1 + return httpx.Response( + 200, + json=build_file_info_payload(output_id, "example-001.png", "image/png"), + ) + msg = f"Unexpected request {request.method} {request.url}" + raise AssertionError(msg) + + transport = httpx.MockTransport(handler) + with PdfRestClient(api_key=VALID_API_KEY, transport=transport) as client: + response = client.convert_to_png(input_file) + + assert seen == {"post": 1, "get": 1} + output_file = response.output_files[0] + assert 
output_file.name == "example-001.png" + assert output_file.name.startswith("example") + assert output_file.type == "image/png" + assert output_file.size == 256 + assert str(output_file.url).endswith(output_id) + assert response.warning is None + + +@pytest.mark.parametrize( + "invalid_color", + [pytest.param("cmyk", id="cmyk"), pytest.param("lab", id="lab")], +) +def test_convert_to_png_invalid_color_model( + monkeypatch: pytest.MonkeyPatch, invalid_color: str +) -> None: + monkeypatch.delenv("PDFREST_API_KEY", raising=False) + + def handler(_: httpx.Request) -> httpx.Response: + pytest.fail("Request should not be sent when validation fails.") + + transport = httpx.MockTransport(handler) + with ( + PdfRestClient(api_key=VALID_API_KEY, transport=transport) as client, + pytest.raises( + ValidationError, + match=re.escape("Input should be 'rgb', 'rgba' or 'gray'"), + ), + ): + client.convert_to_png( + make_pdf_file(PdfRestFileID.generate(1)), + color_model=invalid_color, # type: ignore[arg-type] + ) + + +def test_convert_to_png_invalid_smoothing_value( + monkeypatch: pytest.MonkeyPatch, +) -> None: + monkeypatch.delenv("PDFREST_API_KEY", raising=False) + + def handler(_: httpx.Request) -> httpx.Response: + pytest.fail("Request should not be sent when validation fails.") + + transport = httpx.MockTransport(handler) + with ( + PdfRestClient(api_key=VALID_API_KEY, transport=transport) as client, + pytest.raises( + ValidationError, + match=re.escape("Input should be 'none', 'all', 'text', 'line' or 'image'"), + ), + ): + client.convert_to_png( + make_pdf_file(PdfRestFileID.generate(1)), + smoothing="invalid", # type: ignore[arg-type] + ) + + +def test_convert_to_png_multiple_files_rejected( + monkeypatch: pytest.MonkeyPatch, +) -> None: + monkeypatch.delenv("PDFREST_API_KEY", raising=False) + + def handler(_: httpx.Request) -> httpx.Response: + pytest.fail("Request should not be sent when validation fails.") + + first = make_pdf_file(PdfRestFileID.generate(1)) + second = 
make_pdf_file(PdfRestFileID.generate(1)) + transport = httpx.MockTransport(handler) + with ( + PdfRestClient(api_key=VALID_API_KEY, transport=transport) as client, + pytest.raises( + ValidationError, + match=re.escape("List should have at most 1 item after validation"), + ), + ): + client.convert_to_png([first, second]) + + +def test_convert_to_png_empty_page_range_rejected( + monkeypatch: pytest.MonkeyPatch, +) -> None: + monkeypatch.delenv("PDFREST_API_KEY", raising=False) + + def handler(_: httpx.Request) -> httpx.Response: + pytest.fail("Request should not be sent when validation fails.") + + transport = httpx.MockTransport(handler) + with ( + PdfRestClient(api_key=VALID_API_KEY, transport=transport) as client, + pytest.raises( + ValidationError, + match=re.escape("List should have at least 1 item after validation"), + ), + ): + client.convert_to_png( + make_pdf_file(PdfRestFileID.generate(1)), + page_range=[], + ) + + +def test_live_convert_to_png(pdfrest_api_key: str, pdfrest_live_base_url: str) -> None: + resource = get_test_resource_path("report.pdf") + with PdfRestClient( + api_key=pdfrest_api_key, base_url=pdfrest_live_base_url + ) as client: + uploaded = client.files.create_from_paths([resource]) + response = client.convert_to_png( + uploaded[0], + output_prefix="live-convert", + page_range="1", + ) + assert isinstance(response, PdfRestFileBasedResponse) + assert response.output_files + + +@pytest.mark.asyncio +async def test_live_async_convert_to_png( + pdfrest_api_key: str, + pdfrest_live_base_url: str, +) -> None: + resource = get_test_resource_path("report.pdf") + async with AsyncPdfRestClient( + api_key=pdfrest_api_key, base_url=pdfrest_live_base_url + ) as client: + uploaded = await client.files.create_from_paths([resource]) + response = await client.convert_to_png( + uploaded[0], + output_prefix="live-async-convert", + page_range="1", + ) + assert isinstance(response, PdfRestFileBasedResponse) + assert response.output_files diff --git 
a/tests/test_convert_to_tiff.py b/tests/test_convert_to_tiff.py new file mode 100644 index 00000000..13fe836d --- /dev/null +++ b/tests/test_convert_to_tiff.py @@ -0,0 +1,565 @@ +from __future__ import annotations + +import json +import re + +import httpx +import pytest +from pydantic import ValidationError + +from pdfrest import AsyncPdfRestClient, PdfRestClient +from pdfrest.models import PdfRestFileBasedResponse, PdfRestFileID +from pdfrest.models._internal import TiffPdfRestPayload + +from .graphics_test_helpers import ( + ASYNC_API_KEY, + VALID_API_KEY, + assert_conversion_payload, + build_file_info_payload, + make_pdf_file, +) +from .resources import get_test_resource_path + + +@pytest.mark.parametrize( + "color_model", + ["rgb", "rgba", "cmyk", "lab", "gray"], +) +def test_convert_to_tiff_success( + monkeypatch: pytest.MonkeyPatch, color_model: str +) -> None: + monkeypatch.delenv("PDFREST_API_KEY", raising=False) + input_file = make_pdf_file(PdfRestFileID.generate(1)) + output_id = str(PdfRestFileID.generate()) + expected_name = f"converted-{color_model}-001.tif" + + request_payload = TiffPdfRestPayload.model_validate( + { + "files": [input_file], + "output_prefix": "converted", + "page_range": ["1", "last"], + "resolution": 600, + "color_model": color_model, + "smoothing": ["text", "image"], + } + ).model_dump(mode="json", by_alias=True) + + seen: dict[str, int] = {"post": 0, "get": 0} + + def handler(request: httpx.Request) -> httpx.Response: + if request.method == "POST" and request.url.path == "/tif": + seen["post"] += 1 + payload = json.loads(request.content.decode("utf-8")) + assert_conversion_payload(payload, request_payload) + return httpx.Response( + 200, + json={"inputId": [input_file.id], "outputId": [output_id]}, + ) + if request.method == "GET" and request.url.path == f"/resource/{output_id}": + seen["get"] += 1 + return httpx.Response( + 200, + json=build_file_info_payload(output_id, expected_name, "image/tiff"), + ) + msg = f"Unexpected 
request {request.method} {request.url}" + raise AssertionError(msg) + + transport = httpx.MockTransport(handler) + with PdfRestClient(api_key=VALID_API_KEY, transport=transport) as client: + response = client.convert_to_tiff( + input_file, + output_prefix="converted", + page_range=["1", "last"], + resolution=600, + color_model=color_model, # type: ignore[arg-type] + smoothing=["text", "image"], + ) + + assert seen == {"post": 1, "get": 1} + output_file = response.output_files[0] + assert output_file.name == expected_name + assert output_file.type == "image/tiff" + + +def test_convert_to_tiff_defaults_excluded(monkeypatch: pytest.MonkeyPatch) -> None: + monkeypatch.delenv("PDFREST_API_KEY", raising=False) + input_file = make_pdf_file(PdfRestFileID.generate(1)) + output_id = "bcdefa23-4567-4bcd-af01-bbbbbbbbcccc" + + request_payload = TiffPdfRestPayload.model_validate( + {"files": input_file} + ).model_dump(mode="json", by_alias=True, exclude_none=True, exclude_defaults=True) + + def handler(request: httpx.Request) -> httpx.Response: + if request.method == "POST" and request.url.path == "/tif": + payload = json.loads(request.content.decode("utf-8")) + assert_conversion_payload(payload, request_payload) + return httpx.Response( + 200, + json={"inputId": [input_file.id], "outputId": [output_id]}, + ) + if request.method == "GET" and request.url.path == f"/resource/{output_id}": + return httpx.Response( + 200, + json=build_file_info_payload( + output_id, "example-001.tif", "image/tiff" + ), + ) + msg = f"Unexpected request {request.method} {request.url}" + raise AssertionError(msg) + + transport = httpx.MockTransport(handler) + with PdfRestClient(api_key=VALID_API_KEY, transport=transport) as client: + response = client.convert_to_tiff(input_file) + + output_file = response.output_files[0] + assert output_file.name == "example-001.tif" + assert output_file.type == "image/tiff" + + +@pytest.mark.parametrize("resolution", [12, 2400]) +def 
test_convert_to_tiff_resolution_limits( + monkeypatch: pytest.MonkeyPatch, resolution: int +) -> None: + monkeypatch.delenv("PDFREST_API_KEY", raising=False) + input_file = make_pdf_file(PdfRestFileID.generate(1)) + output_id = str(PdfRestFileID.generate()) + + request_payload = TiffPdfRestPayload.model_validate( + { + "files": [input_file], + "resolution": resolution, + } + ).model_dump(mode="json", by_alias=True, exclude_none=True, exclude_defaults=True) + + seen: dict[str, int] = {"post": 0, "get": 0} + + def handler(request: httpx.Request) -> httpx.Response: + if request.method == "POST" and request.url.path == "/tif": + seen["post"] += 1 + payload = json.loads(request.content.decode("utf-8")) + assert_conversion_payload(payload, request_payload) + return httpx.Response( + 200, + json={"inputId": [input_file.id], "outputId": [output_id]}, + ) + if request.method == "GET" and request.url.path == f"/resource/{output_id}": + seen["get"] += 1 + return httpx.Response( + 200, + json=build_file_info_payload( + output_id, f"example-resolution-{resolution}.tif", "image/tiff" + ), + ) + msg = f"Unexpected request {request.method} {request.url}" + raise AssertionError(msg) + + transport = httpx.MockTransport(handler) + with PdfRestClient(api_key=VALID_API_KEY, transport=transport) as client: + response = client.convert_to_tiff( + input_file, + resolution=resolution, + ) + + assert seen == {"post": 1, "get": 1} + assert response.output_files[0].name == f"example-resolution-{resolution}.tif" + + +@pytest.mark.parametrize("invalid_resolution", [11, 2401]) +def test_convert_to_tiff_resolution_out_of_bounds( + monkeypatch: pytest.MonkeyPatch, invalid_resolution: int +) -> None: + monkeypatch.delenv("PDFREST_API_KEY", raising=False) + + def handler(_: httpx.Request) -> httpx.Response: + pytest.fail("Request should not be sent when validation fails.") + + transport = httpx.MockTransport(handler) + with ( + PdfRestClient(api_key=VALID_API_KEY, transport=transport) as client, + 
pytest.raises( + ValidationError, + match=r"less than or equal to 2400|greater than or equal to 12", + ), + ): + client.convert_to_tiff( + make_pdf_file(PdfRestFileID.generate(1)), + resolution=invalid_resolution, + ) + + +@pytest.mark.parametrize( + ("invalid_color", "message"), + [ + pytest.param( + "xyz", + "Input should be 'rgb', 'rgba', 'cmyk', 'lab' or 'gray'", + id="unknown", + ), + ], +) +def test_convert_to_tiff_invalid_color_model( + monkeypatch: pytest.MonkeyPatch, + invalid_color: str, + message: str, +) -> None: + monkeypatch.delenv("PDFREST_API_KEY", raising=False) + + def handler(_: httpx.Request) -> httpx.Response: + pytest.fail("Request should not be sent when validation fails.") + + transport = httpx.MockTransport(handler) + with ( + PdfRestClient(api_key=VALID_API_KEY, transport=transport) as client, + pytest.raises( + ValidationError, + match=re.escape(message), + ), + ): + client.convert_to_tiff( + make_pdf_file(PdfRestFileID.generate(1)), + color_model=invalid_color, # type: ignore[arg-type] + ) + + +@pytest.mark.asyncio +async def test_async_convert_to_tiff_success( + monkeypatch: pytest.MonkeyPatch, +) -> None: + monkeypatch.delenv("PDFREST_API_KEY", raising=False) + input_file = make_pdf_file(PdfRestFileID.generate(1)) + output_id = "cdefab34-5678-4cde-b012-cdefabcdef34" + + request_payload = TiffPdfRestPayload.model_validate( + { + "files": [input_file], + "output_prefix": "async-output", + "page_range": "1-2", + "resolution": 500, + "color_model": "rgba", + "smoothing": ["all"], + } + ).model_dump(mode="json", by_alias=True) + + seen: dict[str, int] = {"post": 0, "get": 0} + + def handler(request: httpx.Request) -> httpx.Response: + if request.method == "POST" and request.url.path == "/tif": + seen["post"] += 1 + payload = json.loads(request.content.decode("utf-8")) + assert_conversion_payload(payload, request_payload) + return httpx.Response( + 200, + json={"inputId": [input_file.id], "outputId": [output_id]}, + ) + if request.method == 
"GET" and request.url.path == f"/resource/{output_id}": + seen["get"] += 1 + assert request.url.params["format"] == "info" + return httpx.Response( + 200, + json=build_file_info_payload( + output_id, "async-output-001.tif", "image/tiff" + ), + ) + msg = f"Unexpected request {request.method} {request.url}" + raise AssertionError(msg) + + transport = httpx.MockTransport(handler) + async with AsyncPdfRestClient(api_key=ASYNC_API_KEY, transport=transport) as client: + response = await client.convert_to_tiff( + [input_file], + output_prefix="async-output", + page_range="1-2", + resolution=500, + color_model="rgba", + smoothing=["all"], + ) + + assert seen == {"post": 1, "get": 1} + output_file = response.output_files[0] + assert output_file.name == "async-output-001.tif" + assert output_file.type == "image/tiff" + + +def test_convert_to_tiff_request_customization( + monkeypatch: pytest.MonkeyPatch, +) -> None: + monkeypatch.delenv("PDFREST_API_KEY", raising=False) + input_file = make_pdf_file(PdfRestFileID.generate(1)) + output_id = "defabc45-6789-4def-9123-abcdefabcdef" + captured_timeout: dict[str, float | dict[str, float] | None] = {} + + def handler(request: httpx.Request) -> httpx.Response: + if request.method == "POST" and request.url.path == "/tif": + assert request.url.params["trace"] == "true" + assert request.headers["X-Debug"] == "tiff" + captured_timeout["value"] = request.extensions.get("timeout") + payload = json.loads(request.content.decode("utf-8")) + assert payload["debug"] is True + assert payload["resolution"] == 520 + assert payload["color_model"] == "rgba" + assert payload["id"] == str(input_file.id) + return httpx.Response( + 200, + json={"inputId": [input_file.id], "outputId": [output_id]}, + ) + if request.method == "GET" and request.url.path == f"/resource/{output_id}": + assert request.url.params["trace"] == "true" + assert request.url.params["format"] == "info" + assert request.headers["X-Debug"] == "tiff" + return httpx.Response( + 200, + 
json=build_file_info_payload(output_id, "custom-001.tif", "image/tiff"), + ) + msg = f"Unexpected request {request.method} {request.url}" + raise AssertionError(msg) + + transport = httpx.MockTransport(handler) + with PdfRestClient(api_key=VALID_API_KEY, transport=transport) as client: + response = client.convert_to_tiff( + input_file, + resolution=520, + color_model="rgba", + extra_query={"trace": "true"}, + extra_headers={"X-Debug": "tiff"}, + extra_body={"debug": True}, + timeout=0.46, + ) + + assert response.output_files[0].name == "custom-001.tif" + timeout_value = captured_timeout["value"] + assert timeout_value is not None + if isinstance(timeout_value, dict): + assert all( + component == pytest.approx(0.46) for component in timeout_value.values() + ) + else: + assert timeout_value == pytest.approx(0.46) + + +def test_convert_to_tiff_validation_error(monkeypatch: pytest.MonkeyPatch) -> None: + monkeypatch.delenv("PDFREST_API_KEY", raising=False) + + def handler(_: httpx.Request) -> httpx.Response: + pytest.fail("Request should not be sent when validation fails.") + + transport = httpx.MockTransport(handler) + with ( + PdfRestClient(api_key=VALID_API_KEY, transport=transport) as client, + pytest.raises( + ValidationError, + match="less than or equal to 2400", + ), + ): + client.convert_to_tiff( + make_pdf_file(PdfRestFileID.generate(1)), + resolution=9999, + ) + + +def test_convert_to_tiff_invalid_smoothing_value( + monkeypatch: pytest.MonkeyPatch, +) -> None: + monkeypatch.delenv("PDFREST_API_KEY", raising=False) + + def handler(_: httpx.Request) -> httpx.Response: + pytest.fail("Request should not be sent when validation fails.") + + transport = httpx.MockTransport(handler) + with ( + PdfRestClient(api_key=VALID_API_KEY, transport=transport) as client, + pytest.raises( + ValidationError, + match=re.escape("Input should be 'none', 'all', 'text', 'line' or 'image'"), + ), + ): + client.convert_to_tiff( + make_pdf_file(PdfRestFileID.generate(1)), + 
smoothing="invalid", # type: ignore[arg-type] + ) + + +def test_convert_to_tiff_multiple_files_rejected( + monkeypatch: pytest.MonkeyPatch, +) -> None: + monkeypatch.delenv("PDFREST_API_KEY", raising=False) + + def handler(_: httpx.Request) -> httpx.Response: + pytest.fail("Request should not be sent when validation fails.") + + first = make_pdf_file(PdfRestFileID.generate(1)) + second = make_pdf_file(PdfRestFileID.generate(1)) + transport = httpx.MockTransport(handler) + with ( + PdfRestClient(api_key=VALID_API_KEY, transport=transport) as client, + pytest.raises( + ValidationError, + match=re.escape("List should have at most 1 item after validation"), + ), + ): + client.convert_to_tiff([first, second]) + + +def test_convert_to_tiff_empty_page_range_rejected( + monkeypatch: pytest.MonkeyPatch, +) -> None: + monkeypatch.delenv("PDFREST_API_KEY", raising=False) + + def handler(_: httpx.Request) -> httpx.Response: + pytest.fail("Request should not be sent when validation fails.") + + transport = httpx.MockTransport(handler) + with ( + PdfRestClient(api_key=VALID_API_KEY, transport=transport) as client, + pytest.raises( + ValidationError, + match=re.escape("List should have at least 1 item after validation"), + ), + ): + client.convert_to_tiff( + make_pdf_file(PdfRestFileID.generate(1)), + page_range=[], + ) + + +def test_convert_to_tiff_sequence_arguments(monkeypatch: pytest.MonkeyPatch) -> None: + monkeypatch.delenv("PDFREST_API_KEY", raising=False) + input_file = make_pdf_file(PdfRestFileID.generate(1)) + output_id = "efabcd56-7890-4f12-a345-f1f2f3f4f5f6" + + request_payload = TiffPdfRestPayload.model_validate( + { + "files": [input_file], + "page_range": "1, 3", + "smoothing": "text", + } + ).model_dump(mode="json", by_alias=True, exclude_none=True, exclude_defaults=True) + + seen: dict[str, int] = {"post": 0, "get": 0} + + def handler(request: httpx.Request) -> httpx.Response: + if request.method == "POST" and request.url.path == "/tif": + seen["post"] += 1 + 
payload = json.loads(request.content.decode("utf-8")) + assert_conversion_payload(payload, request_payload) + return httpx.Response( + 200, + json={"inputId": [input_file.id], "outputId": [output_id]}, + ) + if request.method == "GET" and request.url.path == f"/resource/{output_id}": + seen["get"] += 1 + return httpx.Response( + 200, + json=build_file_info_payload( + output_id, "example-001.tif", "image/tiff" + ), + ) + msg = f"Unexpected request {request.method} {request.url}" + raise AssertionError(msg) + + transport = httpx.MockTransport(handler) + with PdfRestClient(api_key=VALID_API_KEY, transport=transport) as client: + response = client.convert_to_tiff( + [input_file], + page_range="1, 3", + smoothing="text", + ) + + assert seen == {"post": 1, "get": 1} + assert response.output_files[0].name == "example-001.tif" + + +@pytest.mark.asyncio +async def test_async_convert_to_tiff_request_customization( + monkeypatch: pytest.MonkeyPatch, +) -> None: + monkeypatch.delenv("PDFREST_API_KEY", raising=False) + input_file = make_pdf_file(PdfRestFileID.generate(1)) + output_id = "fabcd567-8901-4a23-b456-123412341234" + captured_timeout: dict[str, float | dict[str, float] | None] = {} + + def handler(request: httpx.Request) -> httpx.Response: + if request.method == "POST" and request.url.path == "/tif": + assert request.url.params["trace"] == "true" + assert request.headers["X-Debug"] == "async-tiff" + captured_timeout["value"] = request.extensions.get("timeout") + payload = json.loads(request.content.decode("utf-8")) + assert payload["debug"] is True + assert payload["resolution"] == 540 + assert payload["color_model"] == "cmyk" + assert payload["id"] == str(input_file.id) + return httpx.Response( + 200, + json={"inputId": [input_file.id], "outputId": [output_id]}, + ) + if request.method == "GET" and request.url.path == f"/resource/{output_id}": + assert request.url.params["trace"] == "true" + assert request.headers["X-Debug"] == "async-tiff" + assert 
request.url.params["format"] == "info" + return httpx.Response( + 200, + json=build_file_info_payload( + output_id, "async-custom-001.tif", "image/tiff" + ), + ) + msg = f"Unexpected request {request.method} {request.url}" + raise AssertionError(msg) + + transport = httpx.MockTransport(handler) + async with AsyncPdfRestClient(api_key=ASYNC_API_KEY, transport=transport) as client: + response = await client.convert_to_tiff( + input_file, + resolution=540, + color_model="cmyk", + extra_query={"trace": "true"}, + extra_headers={"X-Debug": "async-tiff"}, + extra_body={"debug": True}, + timeout=0.62, + ) + + assert response.output_files[0].name == "async-custom-001.tif" + timeout_value = captured_timeout["value"] + assert timeout_value is not None + if isinstance(timeout_value, dict): + assert all( + component == pytest.approx(0.62) for component in timeout_value.values() + ) + else: + assert timeout_value == pytest.approx(0.62) + + +def test_live_convert_to_tiff( + pdfrest_api_key: str, + pdfrest_live_base_url: str, +) -> None: + resource = get_test_resource_path("report.pdf") + with PdfRestClient( + api_key=pdfrest_api_key, base_url=pdfrest_live_base_url + ) as client: + uploaded = client.files.create_from_paths([resource]) + response = client.convert_to_tiff( + uploaded[0], + output_prefix="live-tiff", + page_range="1", + ) + assert isinstance(response, PdfRestFileBasedResponse) + assert response.output_files + + +@pytest.mark.asyncio +async def test_live_async_convert_to_tiff( + pdfrest_api_key: str, + pdfrest_live_base_url: str, +) -> None: + resource = get_test_resource_path("report.pdf") + async with AsyncPdfRestClient( + api_key=pdfrest_api_key, base_url=pdfrest_live_base_url + ) as client: + uploaded = await client.files.create_from_paths([resource]) + response = await client.convert_to_tiff( + uploaded[0], + output_prefix="live-async-tiff", + page_range="1", + ) + assert isinstance(response, PdfRestFileBasedResponse) + assert response.output_files diff --git 
a/tests/test_files.py b/tests/test_files.py new file mode 100644 index 00000000..be718d7c --- /dev/null +++ b/tests/test_files.py @@ -0,0 +1,1578 @@ +from __future__ import annotations + +import json +import uuid +from collections.abc import AsyncIterator, Iterator +from contextlib import AsyncExitStack, ExitStack +from dataclasses import dataclass +from datetime import datetime, timezone +from pathlib import Path +from typing import Any, cast + +import httpx +import pytest +import pytest_asyncio + +from pdfrest import AsyncPdfRestClient, PdfRestClient +from pdfrest.models import PdfRestFile, PdfRestFileID + +from .resources import get_test_resource_path + +VALID_API_KEY = "12345678-1234-1234-1234-123456789abc" + + +class _StaticStream(httpx.SyncByteStream): + def __init__(self, payload: bytes) -> None: + self._payload = payload + self._consumed = False + + def __iter__(self) -> Iterator[bytes]: + if self._consumed: + return iter(()) + self._consumed = True + return iter((self._payload,)) + + def close(self) -> None: # pragma: no cover - trivial + ... + + +class _StaticAsyncStream(httpx.AsyncByteStream): + def __init__(self, payload: bytes) -> None: + self._payload = payload + self._consumed = False + + async def __aiter__(self): + if not self._consumed: + self._consumed = True + yield self._payload + + async def aclose(self) -> None: # pragma: no cover - trivial + ... 
+ + +def _build_file_info_payload(file_id: str, name: str) -> dict[str, Any]: + return { + "id": file_id, + "name": name, + "url": f"https://api.pdfrest.com/resource/{file_id}", + "type": "application/pdf" + if name.endswith(".pdf") + else "application/vnd.openxmlformats-officedocument.wordprocessingml.document", + "size": 1, + "modified": datetime(2024, 1, 1, tzinfo=timezone.utc) + .isoformat() + .replace("+00:00", "Z"), + "scheduledDeletionTimeUtc": None, + } + + +def _iso_to_datetime(value: str) -> datetime: + return datetime.fromisoformat(value.replace("Z", "+00:00")) + + +def _assert_file_matches_payload( + file_repr: PdfRestFile, expected_payload: dict[str, Any] +) -> None: + assert isinstance(file_repr, PdfRestFile) + assert file_repr.id == expected_payload["id"] + assert file_repr.name == expected_payload["name"] + assert str(file_repr.url) == expected_payload["url"] + assert file_repr.type == expected_payload["type"] + assert file_repr.size == expected_payload["size"] + assert file_repr.modified == _iso_to_datetime(expected_payload["modified"]) + assert file_repr.scheduled_deletion_time_utc is None + + +def _create_temp_text_file(tmp_path: Path, prefix: str) -> tuple[Path, str, bytes]: + filename = f"{prefix}.txt" + source_path = tmp_path / filename + source_content = f"{prefix}-line1\n{prefix}-line2\n" + source_path.write_text(source_content, encoding="utf-8") + return source_path, source_content, source_path.read_bytes() + + +@dataclass +class LiveFileData: + prefix: str + file: PdfRestFile + original_bytes: bytes + source_text: str + + +@pytest.fixture(scope="class") +def live_sync_file( + pdfrest_api_key: str, + pdfrest_live_base_url: str, + tmp_path_factory: pytest.TempPathFactory, +) -> LiveFileData: + prefix = f"sync-live-{uuid.uuid4().hex}" + temp_dir = tmp_path_factory.mktemp(prefix) + source_path, source_text, source_bytes = _create_temp_text_file(temp_dir, prefix) + with PdfRestClient( + api_key=pdfrest_api_key, base_url=pdfrest_live_base_url + 
) as client: + with source_path.open("rb") as source_file: + uploaded_files = client.files.create([source_file]) + file_repr = uploaded_files[0] + return LiveFileData( + prefix=prefix, + file=file_repr, + original_bytes=source_bytes, + source_text=source_text, + ) + + +@pytest.fixture(scope="class") +def live_async_file( + pdfrest_api_key: str, + pdfrest_live_base_url: str, + tmp_path_factory: pytest.TempPathFactory, +) -> LiveFileData: + prefix = f"async-live-{uuid.uuid4().hex}" + temp_dir = tmp_path_factory.mktemp(prefix) + source_path, source_text, source_bytes = _create_temp_text_file(temp_dir, prefix) + with PdfRestClient( + api_key=pdfrest_api_key, base_url=pdfrest_live_base_url + ) as client: + with source_path.open("rb") as source_file: + uploaded_files = client.files.create([source_file]) + file_repr = uploaded_files[0] + return LiveFileData( + prefix=prefix, + file=file_repr, + original_bytes=source_bytes, + source_text=source_text, + ) + + +@pytest.mark.parametrize( + "file_ref", + [ + pytest.param(PdfRestFileID.generate(), id="pdfrest-file-id"), + pytest.param(str(uuid.uuid4()), id="raw-str"), + ], +) +def test_files_get_fetches_info(file_ref: PdfRestFileID | str) -> None: + file_id = str(file_ref) + info_payload = _build_file_info_payload(file_id, "report.pdf") + + def handler(request: httpx.Request) -> httpx.Response: + if request.method == "GET" and request.url.path == f"/resource/{file_id}": + assert request.url.params["format"] == "info" + return httpx.Response(200, json=info_payload) + msg = f"Unexpected request: {request.method} {request.url}" + raise AssertionError(msg) + + transport = httpx.MockTransport(handler) + with PdfRestClient(api_key=VALID_API_KEY, transport=transport) as client: + file_repr = client.files.get(file_ref) + + _assert_file_matches_payload(file_repr, info_payload) + + +def test_files_get_request_customization() -> None: + file_id = str(uuid.uuid4()) + info_payload = _build_file_info_payload(file_id, "report.pdf") + 
captured_timeout: dict[str, float | dict[str, float] | None] = {} + + def handler(request: httpx.Request) -> httpx.Response: + if request.method == "GET" and request.url.path == f"/resource/{file_id}": + assert request.url.params["format"] == "info" + assert request.headers["X-Trace"] == "1" + captured_timeout["value"] = request.extensions.get("timeout") + return httpx.Response(200, json=info_payload) + msg = f"Unexpected request: {request.method} {request.url}" + raise AssertionError(msg) + + transport = httpx.MockTransport(handler) + with PdfRestClient(api_key=VALID_API_KEY, transport=transport) as client: + file_repr = client.files.get( + file_id, + extra_headers={"X-Trace": "1"}, + timeout=0.6, + ) + + _assert_file_matches_payload(file_repr, info_payload) + timeout_value = captured_timeout["value"] + assert timeout_value is not None + if isinstance(timeout_value, dict): + assert all( + component == pytest.approx(0.6) for component in timeout_value.values() + ) + else: + assert timeout_value == pytest.approx(0.6) + + +def test_files_get_rejects_invalid_id() -> None: + transport = httpx.MockTransport( + lambda request: (_ for _ in ()).throw( + AssertionError("Request should not be sent for invalid IDs.") + ) + ) + + with ( + PdfRestClient(api_key=VALID_API_KEY, transport=transport) as client, + pytest.raises(ValueError, match="Invalid PdfRestPrefixedUUID4"), + ): + client.files.get("not-a-valid-id") + + +def test_files_create_uses_upload_and_info() -> None: + uploaded_file_id = str(uuid.uuid4()) + info_payload = _build_file_info_payload(uploaded_file_id, "report.pdf") + + def handler(request: httpx.Request) -> httpx.Response: + if request.method == "POST" and request.url.path == "/upload": + return httpx.Response( + 200, + json={ + "files": [ + {"name": "report.pdf", "id": uploaded_file_id}, + ] + }, + ) + if ( + request.method == "GET" + and request.url.path == f"/resource/{uploaded_file_id}" + ): + assert request.url.params["format"] == "info" + return 
httpx.Response(200, json=info_payload) + msg = f"Unexpected request: {request.method} {request.url}" + raise AssertionError(msg) + + transport = httpx.MockTransport(handler) + report_pdf = get_test_resource_path("report.pdf") + with ( + PdfRestClient(api_key=VALID_API_KEY, transport=transport) as client, + report_pdf.open("rb") as pdf_file, + ): + response = client.files.create([("report.pdf", pdf_file)]) + + assert isinstance(response, list) + assert len(response) == 1 + file_repr = response[0] + assert isinstance(file_repr, PdfRestFile) + _assert_file_matches_payload(file_repr, info_payload) + + +def test_files_create_request_customization() -> None: + uploaded_file_id = str(uuid.uuid4()) + info_payload = _build_file_info_payload(uploaded_file_id, "report.pdf") + captured_timeout: dict[str, float | dict[str, float] | None] = {} + + def handler(request: httpx.Request) -> httpx.Response: + if request.method == "POST" and request.url.path == "/upload": + assert request.url.params["mode"] == "extended" + assert request.headers["X-Upload-Token"] == "token" + captured_timeout["value"] = request.extensions.get("timeout") + return httpx.Response( + 200, + json={ + "files": [ + {"name": "report.pdf", "id": uploaded_file_id}, + ] + }, + ) + if ( + request.method == "GET" + and request.url.path == f"/resource/{uploaded_file_id}" + ): + assert request.url.params["format"] == "info" + assert request.url.params["mode"] == "extended" + assert request.headers["X-Upload-Token"] == "token" + return httpx.Response(200, json=info_payload) + msg = f"Unexpected request: {request.method} {request.url}" + raise AssertionError(msg) + + transport = httpx.MockTransport(handler) + with PdfRestClient(api_key=VALID_API_KEY, transport=transport) as client: + response = client.files.create( + [("report.pdf", b"payload")], + extra_query={"mode": "extended"}, + extra_headers={"X-Upload-Token": "token"}, + timeout=0.75, + ) + + assert len(response) == 1 + _assert_file_matches_payload(response[0], 
info_payload) + timeout_value = captured_timeout["value"] + assert timeout_value is not None + if isinstance(timeout_value, dict): + assert all( + component == pytest.approx(0.75) for component in timeout_value.values() + ) + else: + assert timeout_value == pytest.approx(0.75) + + +def test_download_file_request_customization() -> None: + file_id = str(uuid.uuid4()) + captured_timeout: dict[str, float | dict[str, float] | None] = {} + + def handler(request: httpx.Request) -> httpx.Response: + assert request.method == "GET" + assert request.url.path == f"/resource/{file_id}" + assert request.url.params["mode"] == "raw" + assert request.headers["X-Trace"] == "1" + captured_timeout["value"] = request.extensions.get("timeout") + return httpx.Response(200, content=b"content") + + transport = httpx.MockTransport(handler) + with PdfRestClient(api_key=VALID_API_KEY, transport=transport) as client: + response = client.download_file( + file_id, + extra_query={"mode": "raw"}, + extra_headers={"X-Trace": "1"}, + timeout=1.25, + ) + data = response.read() + response.close() + + assert data == b"content" + timeout_value = captured_timeout["value"] + assert timeout_value is not None + if isinstance(timeout_value, dict): + assert all( + component == pytest.approx(1.25) for component in timeout_value.values() + ) + else: + assert timeout_value == pytest.approx(1.25) + + +def test_files_read_bytes_request_customization() -> None: + file_id = str(uuid.uuid4()) + captured_timeout: dict[str, float | dict[str, float] | None] = {} + + def handler(request: httpx.Request) -> httpx.Response: + if request.method == "GET" and request.url.path == f"/resource/{file_id}": + assert request.url.params["mode"] == "raw" + assert request.headers["X-Trace"] == "1" + captured_timeout["value"] = request.extensions.get("timeout") + return httpx.Response(200, content=b"payload") + msg = f"Unexpected request: {request.method} {request.url}" + raise AssertionError(msg) + + transport = 
httpx.MockTransport(handler) + with PdfRestClient(api_key=VALID_API_KEY, transport=transport) as client: + data = client.files.read_bytes( + file_id, + extra_query={"mode": "raw"}, + extra_headers={"X-Trace": "1"}, + timeout=0.4, + ) + + assert data == b"payload" + timeout_value = captured_timeout["value"] + assert timeout_value is not None + if isinstance(timeout_value, dict): + assert all( + component == pytest.approx(0.4) for component in timeout_value.values() + ) + else: + assert timeout_value == pytest.approx(0.4) + + +def test_files_create_from_paths_uses_upload_and_info() -> None: + uploaded_ids = [str(uuid.uuid4()), str(uuid.uuid4())] + info_payloads = { + uploaded_ids[0]: _build_file_info_payload(uploaded_ids[0], "report.pdf"), + uploaded_ids[1]: _build_file_info_payload(uploaded_ids[1], "report.docx"), + } + + def handler(request: httpx.Request) -> httpx.Response: + if request.method == "POST" and request.url.path == "/upload": + body = request.content + assert body.count(b'name="file"') == 2 + assert b'filename="report.pdf"' in body + assert b'filename="report.docx"' in body + return httpx.Response( + 200, + json={ + "files": [ + {"name": "report.pdf", "id": uploaded_ids[0]}, + {"name": "report.docx", "id": uploaded_ids[1]}, + ] + }, + ) + if request.method == "GET" and request.url.path.startswith("/resource/"): + file_id = request.url.path.split("/")[-1] + assert request.url.params["format"] == "info" + payload = info_payloads[file_id] + return httpx.Response(200, json=payload) + msg = f"Unexpected request: {request.method} {request.url}" + raise AssertionError(msg) + + transport = httpx.MockTransport(handler) + report_pdf = get_test_resource_path("report.pdf") + report_docx = get_test_resource_path("report.docx") + with PdfRestClient(api_key=VALID_API_KEY, transport=transport) as client: + response = client.files.create_from_paths([report_pdf, report_docx]) + + assert isinstance(response, list) + assert len(response) == 2 + for file_repr in response: 
+ payload = info_payloads[file_repr.id] + _assert_file_matches_payload(file_repr, payload) + + +def test_files_create_from_paths_single_path() -> None: + uploaded_file_id = str(uuid.uuid4()) + info_payload = _build_file_info_payload(uploaded_file_id, "report.pdf") + + def handler(request: httpx.Request) -> httpx.Response: + if request.method == "POST" and request.url.path == "/upload": + body = request.content + assert body.count(b'name="file"') == 1 + assert b'filename="report.pdf"' in body + return httpx.Response( + 200, + json={ + "files": [ + {"name": "report.pdf", "id": uploaded_file_id}, + ] + }, + ) + if request.method == "GET" and request.url.path.startswith("/resource/"): + assert request.url.params["format"] == "info" + return httpx.Response(200, json=info_payload) + msg = f"Unexpected request: {request.method} {request.url}" + raise AssertionError(msg) + + transport = httpx.MockTransport(handler) + report_pdf = get_test_resource_path("report.pdf") + with PdfRestClient(api_key=VALID_API_KEY, transport=transport) as client: + response = client.files.create_from_paths(report_pdf) + + assert len(response) == 1 + _assert_file_matches_payload(response[0], info_payload) + + +def test_files_create_from_urls_uses_upload_and_info() -> None: + uploaded_ids = [str(uuid.uuid4()), str(uuid.uuid4())] + info_payloads = { + uploaded_ids[0]: _build_file_info_payload(uploaded_ids[0], "report.pdf"), + uploaded_ids[1]: _build_file_info_payload(uploaded_ids[1], "report.docx"), + } + + def handler(request: httpx.Request) -> httpx.Response: + if request.method == "POST" and request.url.path == "/upload": + payload = json.loads(request.content.decode("utf-8")) + assert payload["url"] == [ + "https://example.com/report.pdf", + "https://example.com/report.docx", + ] + return httpx.Response( + 200, + json={ + "files": [ + {"name": "report.pdf", "id": uploaded_ids[0]}, + {"name": "report.docx", "id": uploaded_ids[1]}, + ] + }, + ) + if request.method == "GET" and 
request.url.path.startswith("/resource/"): + file_id = request.url.path.split("/")[-1] + assert request.url.params["format"] == "info" + return httpx.Response(200, json=info_payloads[file_id]) + msg = f"Unexpected request: {request.method} {request.url}" + raise AssertionError(msg) + + transport = httpx.MockTransport(handler) + with PdfRestClient(api_key=VALID_API_KEY, transport=transport) as client: + response = client.files.create_from_urls( + [ + "https://example.com/report.pdf", + httpx.URL("https://example.com/report.docx"), + ] + ) + + assert len(response) == 2 + for file_repr in response: + payload = info_payloads[file_repr.id] + _assert_file_matches_payload(file_repr, payload) + + +def test_files_create_from_urls_single_url() -> None: + uploaded_file_id = str(uuid.uuid4()) + info_payload = _build_file_info_payload(uploaded_file_id, "report.pdf") + + def handler(request: httpx.Request) -> httpx.Response: + if request.method == "POST" and request.url.path == "/upload": + payload = json.loads(request.content.decode("utf-8")) + assert payload["url"] == ["https://example.com/report.pdf"] + return httpx.Response( + 200, + json={ + "files": [ + {"name": "report.pdf", "id": uploaded_file_id}, + ] + }, + ) + if request.method == "GET" and request.url.path.startswith("/resource/"): + assert request.url.params["format"] == "info" + return httpx.Response(200, json=info_payload) + msg = f"Unexpected request: {request.method} {request.url}" + raise AssertionError(msg) + + transport = httpx.MockTransport(handler) + with PdfRestClient(api_key=VALID_API_KEY, transport=transport) as client: + response = client.files.create_from_urls("https://example.com/report.pdf") + + assert len(response) == 1 + _assert_file_matches_payload(response[0], info_payload) + + +def test_files_create_from_urls_extra_body() -> None: + uploaded_file_id = str(uuid.uuid4()) + + def handler(request: httpx.Request) -> httpx.Response: + if request.method == "POST" and request.url.path == "/upload": + 
payload = json.loads(request.content.decode("utf-8")) + assert payload == { + "url": ["https://example.com/report.pdf"], + "metadata": {"source": "test"}, + } + return httpx.Response( + 200, + json={ + "files": [ + {"name": "report.pdf", "id": uploaded_file_id}, + ] + }, + ) + if request.method == "GET" and request.url.path.startswith("/resource/"): + assert request.url.params["format"] == "info" + return httpx.Response( + 200, json=_build_file_info_payload(uploaded_file_id, "report.pdf") + ) + msg = f"Unexpected request: {request.method} {request.url}" + raise AssertionError(msg) + + transport = httpx.MockTransport(handler) + with PdfRestClient(api_key=VALID_API_KEY, transport=transport) as client: + response = client.files.create_from_urls( + "https://example.com/report.pdf", + extra_body={"metadata": {"source": "test"}}, + ) + + assert len(response) == 1 + assert response[0].id == uploaded_file_id + + +def test_files_create_from_paths_supports_metadata() -> None: + uploaded_file_id = str(uuid.uuid4()) + info_payload = _build_file_info_payload(uploaded_file_id, "report.pdf") + + def handler(request: httpx.Request) -> httpx.Response: + if request.method == "POST" and request.url.path == "/upload": + body = request.content + assert b'filename="report.pdf"' in body + assert b"Content-Type: application/test-pdf" in body + assert b"X-Custom: header" in body + return httpx.Response( + 200, + json={ + "files": [ + {"name": "report.pdf", "id": uploaded_file_id}, + ] + }, + ) + if request.method == "GET": + assert request.url.params["format"] == "info" + return httpx.Response(200, json=info_payload) + msg = f"Unexpected request: {request.method} {request.url}" + raise AssertionError(msg) + + transport = httpx.MockTransport(handler) + report_pdf = get_test_resource_path("report.pdf") + with PdfRestClient(api_key=VALID_API_KEY, transport=transport) as client: + response = client.files.create_from_paths( + [ + ( + report_pdf, + "application/test-pdf", + {"X-Custom": 
"header"}, + ) + ] + ) + + assert len(response) == 1 + _assert_file_matches_payload(response[0], info_payload) + + +class TestDownloadHelpers: + @pytest.fixture + def client(self) -> Iterator[tuple[PdfRestClient, bytes, dict[str, Any]]]: + binary_content = b"line1\nline2\n" + json_payload: dict[str, Any] = {"message": "hi"} + + def handler(request: httpx.Request) -> httpx.Response: + if request.method == "GET" and request.url.path == "/resource/file-id": + return httpx.Response(200, stream=_StaticStream(binary_content)) + if request.method == "GET" and request.url.path == "/resource/file-id-json": + payload = json.dumps(json_payload).encode("utf-8") + return httpx.Response(200, stream=_StaticStream(payload)) + msg = f"Unexpected request: {request.method} {request.url}" + raise AssertionError(msg) + + transport = httpx.MockTransport(handler) + with PdfRestClient( + api_key=VALID_API_KEY, transport=transport + ) as pdfrest_client: + yield pdfrest_client, binary_content, json_payload + + def test_read_bytes( + self, client: tuple[PdfRestClient, bytes, dict[str, Any]] + ) -> None: + pdfrest_client, binary_content, _ = client + assert pdfrest_client.files.read_bytes("file-id") == binary_content + + def test_read_text( + self, client: tuple[PdfRestClient, bytes, dict[str, Any]] + ) -> None: + pdfrest_client, binary_content, _ = client + assert pdfrest_client.files.read_text("file-id") == binary_content.decode() + + def test_read_json( + self, client: tuple[PdfRestClient, bytes, dict[str, Any]] + ) -> None: + pdfrest_client, _, json_payload = client + assert pdfrest_client.files.read_json("file-id-json") == json_payload + + def test_write_bytes( + self, + client: tuple[PdfRestClient, bytes, dict[str, Any]], + tmp_path: Path, + ) -> None: + pdfrest_client, binary_content, _ = client + destination = tmp_path / "download.bin" + written_path = pdfrest_client.files.write_bytes("file-id", destination) + assert written_path.read_bytes() == binary_content + + def 
test_stream_iter_raw( + self, client: tuple[PdfRestClient, bytes, dict[str, Any]] + ) -> None: + pdfrest_client, binary_content, _ = client + with pdfrest_client.files.stream("file-id") as stream: + raw_chunks = list(stream.iter_raw()) + assert b"".join(raw_chunks) == binary_content + + def test_stream_iter_bytes( + self, client: tuple[PdfRestClient, bytes, dict[str, Any]] + ) -> None: + pdfrest_client, binary_content, _ = client + with pdfrest_client.files.stream("file-id") as stream: + chunks = list(stream.iter_bytes()) + assert b"".join(chunks) == binary_content + + def test_stream_iter_text( + self, client: tuple[PdfRestClient, bytes, dict[str, Any]] + ) -> None: + pdfrest_client, binary_content, _ = client + with pdfrest_client.files.stream("file-id") as stream: + text_chunks = list(stream.iter_text()) + assert "".join(text_chunks) == binary_content.decode() + + def test_stream_iter_lines( + self, client: tuple[PdfRestClient, bytes, dict[str, Any]] + ) -> None: + pdfrest_client, binary_content, _ = client + with pdfrest_client.files.stream("file-id") as stream: + lines = list(stream.iter_lines()) + assert lines == binary_content.decode().splitlines() + + +@pytest.mark.asyncio +class TestAsyncDownloadHelpers: + @pytest_asyncio.fixture + async def client( + self, + ) -> AsyncIterator[tuple[AsyncPdfRestClient, bytes, dict[str, Any]]]: + binary_content = b"line1\nline2\n" + json_payload: dict[str, Any] = {"message": "hi"} + + def handler(request: httpx.Request) -> httpx.Response: + if request.method == "GET" and request.url.path == "/resource/file-id": + return httpx.Response(200, stream=_StaticAsyncStream(binary_content)) + if request.method == "GET" and request.url.path == "/resource/file-id-json": + payload = json.dumps(json_payload).encode("utf-8") + return httpx.Response(200, stream=_StaticAsyncStream(payload)) + msg = f"Unexpected request: {request.method} {request.url}" + raise AssertionError(msg) + + transport = httpx.MockTransport(handler) + async with 
AsyncPdfRestClient( + api_key=VALID_API_KEY, transport=transport + ) as pdfrest_client: + yield pdfrest_client, binary_content, json_payload + + async def test_read_bytes( + self, client: tuple[AsyncPdfRestClient, bytes, dict[str, Any]] + ) -> None: + pdfrest_client, binary_content, _ = client + assert await pdfrest_client.files.read_bytes("file-id") == binary_content + + async def test_read_text( + self, client: tuple[AsyncPdfRestClient, bytes, dict[str, Any]] + ) -> None: + pdfrest_client, binary_content, _ = client + assert ( + await pdfrest_client.files.read_text("file-id") == binary_content.decode() + ) + + async def test_read_json( + self, client: tuple[AsyncPdfRestClient, bytes, dict[str, Any]] + ) -> None: + pdfrest_client, _, json_payload = client + assert await pdfrest_client.files.read_json("file-id-json") == json_payload + + async def test_write_bytes( + self, + client: tuple[AsyncPdfRestClient, bytes, dict[str, Any]], + tmp_path: Path, + ) -> None: + pdfrest_client, binary_content, _ = client + destination = tmp_path / "async-download.bin" + written_path = await pdfrest_client.files.write_bytes("file-id", destination) + assert written_path.read_bytes() == binary_content + + async def test_stream_iter_raw( + self, client: tuple[AsyncPdfRestClient, bytes, dict[str, Any]] + ) -> None: + pdfrest_client, binary_content, _ = client + async with await pdfrest_client.files.stream("file-id") as stream: + raw_chunks = [chunk async for chunk in stream.iter_raw()] + assert b"".join(raw_chunks) == binary_content + + async def test_stream_iter_bytes( + self, client: tuple[AsyncPdfRestClient, bytes, dict[str, Any]] + ) -> None: + pdfrest_client, binary_content, _ = client + async with await pdfrest_client.files.stream("file-id") as stream: + chunks = [chunk async for chunk in stream.iter_bytes()] + assert b"".join(chunks) == binary_content + + async def test_stream_iter_text( + self, client: tuple[AsyncPdfRestClient, bytes, dict[str, Any]] + ) -> None: + 
pdfrest_client, binary_content, _ = client + async with await pdfrest_client.files.stream("file-id") as stream: + text_chunks = [chunk async for chunk in stream.iter_text()] + assert "".join(text_chunks) == binary_content.decode() + + async def test_stream_iter_lines( + self, client: tuple[AsyncPdfRestClient, bytes, dict[str, Any]] + ) -> None: + pdfrest_client, binary_content, _ = client + async with await pdfrest_client.files.stream("file-id") as stream: + lines = [line async for line in stream.iter_lines()] + assert lines == binary_content.decode().splitlines() + + +@pytest.mark.asyncio +async def test_async_files_create_from_urls_invalid_scheme() -> None: + transport = httpx.MockTransport(lambda request: httpx.Response(400)) + async with AsyncPdfRestClient(api_key=VALID_API_KEY, transport=transport) as client: + with pytest.raises(ValueError, match=r"URL scheme should be 'http' or 'https'"): + await client.files.create_from_urls("ftp://example.com/file.pdf") + + +def test_files_create_rejects_empty_input() -> None: + with PdfRestClient( + api_key=VALID_API_KEY, + transport=httpx.MockTransport(lambda _: httpx.Response(200)), + ) as client: + with pytest.raises( + TypeError, + match=r"Upload files must be provided as a sequence or a single file specification\.", + ): + client.files.create(cast(Any, {})) + with pytest.raises(ValueError, match=r"At least one file must be provided\."): + client.files.create([]) + with pytest.raises( + ValueError, match=r"At least one file path must be provided\." 
+ ): + client.files.create_from_paths([]) + with pytest.raises( + ValueError, + match=r"Value should have at least 1 item after validation, not 0", + ): + client.files.create_from_urls([]) + + +@pytest.mark.asyncio +async def test_async_files_create_uses_upload_and_info() -> None: + uploaded_ids = [str(uuid.uuid4()), str(uuid.uuid4())] + info_payloads = { + uploaded_ids[0]: _build_file_info_payload(uploaded_ids[0], "report.pdf"), + uploaded_ids[1]: _build_file_info_payload(uploaded_ids[1], "report.docx"), + } + + def handler(request: httpx.Request) -> httpx.Response: + if request.method == "POST" and request.url.path == "/upload": + return httpx.Response( + 200, + json={ + "files": [ + {"name": "report.pdf", "id": uploaded_ids[0]}, + {"name": "report.docx", "id": uploaded_ids[1]}, + ] + }, + ) + if request.method == "GET" and request.url.path.startswith("/resource/"): + file_id = request.url.path.split("/")[-1] + assert request.url.params["format"] == "info" + payload = info_payloads[file_id] + return httpx.Response(200, json=payload) + msg = f"Unexpected request: {request.method} {request.url}" + raise AssertionError(msg) + + transport = httpx.MockTransport(handler) + + report_pdf = get_test_resource_path("report.pdf") + report_docx = get_test_resource_path("report.docx") + async with AsyncPdfRestClient(api_key=VALID_API_KEY, transport=transport) as client: + with report_pdf.open("rb") as pdf_file, report_docx.open("rb") as docx_file: + response = await client.files.create( + [ + ("report.pdf", pdf_file), + ("report.docx", docx_file), + ] + ) + + assert isinstance(response, list) + assert len(response) == 2 + for file_repr in response: + payload = info_payloads[file_repr.id] + _assert_file_matches_payload(file_repr, payload) + + +@pytest.mark.asyncio +async def test_async_files_get_request_customization() -> None: + file_id = str(uuid.uuid4()) + info_payload = _build_file_info_payload(file_id, "report.pdf") + captured_timeout: dict[str, float | dict[str, float] | 
None] = {} + + def handler(request: httpx.Request) -> httpx.Response: + if request.method == "GET" and request.url.path == f"/resource/{file_id}": + assert request.url.params["format"] == "info" + assert request.headers["X-Trace"] == "async" + captured_timeout["value"] = request.extensions.get("timeout") + return httpx.Response(200, json=info_payload) + msg = f"Unexpected request: {request.method} {request.url}" + raise AssertionError(msg) + + transport = httpx.MockTransport(handler) + async with AsyncPdfRestClient(api_key=VALID_API_KEY, transport=transport) as client: + file_repr = await client.files.get( + file_id, + extra_headers={"X-Trace": "async"}, + timeout=0.55, + ) + + _assert_file_matches_payload(file_repr, info_payload) + timeout_value = captured_timeout["value"] + assert timeout_value is not None + if isinstance(timeout_value, dict): + assert all( + component == pytest.approx(0.55) for component in timeout_value.values() + ) + else: + assert timeout_value == pytest.approx(0.55) + + +@pytest.mark.asyncio +async def test_async_files_create_request_customization() -> None: + uploaded_file_id = str(uuid.uuid4()) + info_payload = _build_file_info_payload(uploaded_file_id, "report.pdf") + captured_timeout: dict[str, float | dict[str, float] | None] = {} + + def handler(request: httpx.Request) -> httpx.Response: + if request.method == "POST" and request.url.path == "/upload": + assert request.url.params["mode"] == "extended" + assert request.headers["X-Upload-Token"] == "token" + captured_timeout["value"] = request.extensions.get("timeout") + return httpx.Response( + 200, + json={ + "files": [ + {"name": "report.pdf", "id": uploaded_file_id}, + ] + }, + ) + if ( + request.method == "GET" + and request.url.path == f"/resource/{uploaded_file_id}" + ): + assert request.url.params["format"] == "info" + assert request.url.params["mode"] == "extended" + assert request.headers["X-Upload-Token"] == "token" + return httpx.Response(200, json=info_payload) + msg = 
f"Unexpected request: {request.method} {request.url}" + raise AssertionError(msg) + + transport = httpx.MockTransport(handler) + async with AsyncPdfRestClient(api_key=VALID_API_KEY, transport=transport) as client: + response = await client.files.create( + [("report.pdf", b"payload")], + extra_query={"mode": "extended"}, + extra_headers={"X-Upload-Token": "token"}, + timeout=0.5, + ) + + assert len(response) == 1 + _assert_file_matches_payload(response[0], info_payload) + timeout_value = captured_timeout["value"] + assert timeout_value is not None + if isinstance(timeout_value, dict): + assert all( + component == pytest.approx(0.5) for component in timeout_value.values() + ) + else: + assert timeout_value == pytest.approx(0.5) + + +@pytest.mark.asyncio +async def test_async_download_file_request_customization() -> None: + file_id = str(uuid.uuid4()) + captured_timeout: dict[str, float | dict[str, float] | None] = {} + + def handler(request: httpx.Request) -> httpx.Response: + assert request.method == "GET" + assert request.url.path == f"/resource/{file_id}" + assert request.url.params["mode"] == "raw" + assert request.headers["X-Trace"] == "async" + captured_timeout["value"] = request.extensions.get("timeout") + return httpx.Response(200, content=b"content") + + transport = httpx.MockTransport(handler) + async with AsyncPdfRestClient(api_key=VALID_API_KEY, transport=transport) as client: + response = await client.download_file( + file_id, + extra_query={"mode": "raw"}, + extra_headers={"X-Trace": "async"}, + timeout=0.9, + ) + data = await response.aread() + await response.aclose() + + assert data == b"content" + timeout_value = captured_timeout["value"] + assert timeout_value is not None + if isinstance(timeout_value, dict): + assert all( + component == pytest.approx(0.9) for component in timeout_value.values() + ) + else: + assert timeout_value == pytest.approx(0.9) + + +@pytest.mark.asyncio +async def test_async_files_read_bytes_request_customization() -> None: + 
file_id = str(uuid.uuid4()) + captured_timeout: dict[str, float | dict[str, float] | None] = {} + + def handler(request: httpx.Request) -> httpx.Response: + if request.method == "GET" and request.url.path == f"/resource/{file_id}": + assert request.url.params["mode"] == "raw" + assert request.headers["X-Trace"] == "async" + captured_timeout["value"] = request.extensions.get("timeout") + return httpx.Response(200, content=b"payload") + msg = f"Unexpected request: {request.method} {request.url}" + raise AssertionError(msg) + + transport = httpx.MockTransport(handler) + async with AsyncPdfRestClient(api_key=VALID_API_KEY, transport=transport) as client: + data = await client.files.read_bytes( + file_id, + extra_query={"mode": "raw"}, + extra_headers={"X-Trace": "async"}, + timeout=0.35, + ) + + assert data == b"payload" + timeout_value = captured_timeout["value"] + assert timeout_value is not None + if isinstance(timeout_value, dict): + assert all( + component == pytest.approx(0.35) for component in timeout_value.values() + ) + else: + assert timeout_value == pytest.approx(0.35) + + +@pytest.mark.asyncio +async def test_async_files_create_from_urls() -> None: + uploaded_ids = [str(uuid.uuid4()), str(uuid.uuid4())] + info_payloads = { + uploaded_ids[0]: _build_file_info_payload(uploaded_ids[0], "report.pdf"), + uploaded_ids[1]: _build_file_info_payload(uploaded_ids[1], "report.docx"), + } + + def handler(request: httpx.Request) -> httpx.Response: + if request.method == "POST" and request.url.path == "/upload": + payload = json.loads(request.content.decode("utf-8")) + assert payload["url"] == [ + "https://example.com/report.pdf", + "https://example.com/report.docx", + ] + return httpx.Response( + 200, + json={ + "files": [ + {"name": "report.pdf", "id": uploaded_ids[0]}, + {"name": "report.docx", "id": uploaded_ids[1]}, + ] + }, + ) + if request.method == "GET" and request.url.path.startswith("/resource/"): + file_id = request.url.path.split("/")[-1] + assert 
request.url.params["format"] == "info" + return httpx.Response(200, json=info_payloads[file_id]) + msg = f"Unexpected request: {request.method} {request.url}" + raise AssertionError(msg) + + transport = httpx.MockTransport(handler) + async with AsyncPdfRestClient(api_key=VALID_API_KEY, transport=transport) as client: + response = await client.files.create_from_urls( + [ + "https://example.com/report.pdf", + httpx.URL("https://example.com/report.docx"), + ] + ) + + assert len(response) == 2 + for file_repr in response: + payload = info_payloads[file_repr.id] + _assert_file_matches_payload(file_repr, payload) + + +@pytest.mark.asyncio +async def test_async_files_create_from_urls_single_url() -> None: + uploaded_file_id = str(uuid.uuid4()) + info_payload = _build_file_info_payload(uploaded_file_id, "report.pdf") + + def handler(request: httpx.Request) -> httpx.Response: + if request.method == "POST" and request.url.path == "/upload": + payload = json.loads(request.content.decode("utf-8")) + assert payload["url"] == ["https://example.com/report.pdf"] + return httpx.Response( + 200, + json={ + "files": [ + {"name": "report.pdf", "id": uploaded_file_id}, + ] + }, + ) + if request.method == "GET" and request.url.path.startswith("/resource/"): + assert request.url.params["format"] == "info" + return httpx.Response(200, json=info_payload) + msg = f"Unexpected request: {request.method} {request.url}" + raise AssertionError(msg) + + transport = httpx.MockTransport(handler) + async with AsyncPdfRestClient(api_key=VALID_API_KEY, transport=transport) as client: + response = await client.files.create_from_urls("https://example.com/report.pdf") + + assert len(response) == 1 + _assert_file_matches_payload(response[0], info_payload) + + +@pytest.mark.asyncio +async def test_async_files_create_from_urls_extra_body() -> None: + uploaded_file_id = str(uuid.uuid4()) + + def handler(request: httpx.Request) -> httpx.Response: + if request.method == "POST" and request.url.path == 
"/upload": + payload = json.loads(request.content.decode("utf-8")) + assert payload == { + "url": ["https://example.com/report.pdf"], + "metadata": {"source": "async-test"}, + } + return httpx.Response( + 200, + json={ + "files": [ + {"name": "report.pdf", "id": uploaded_file_id}, + ] + }, + ) + if request.method == "GET" and request.url.path.startswith("/resource/"): + assert request.url.params["format"] == "info" + return httpx.Response( + 200, json=_build_file_info_payload(uploaded_file_id, "report.pdf") + ) + msg = f"Unexpected request: {request.method} {request.url}" + raise AssertionError(msg) + + transport = httpx.MockTransport(handler) + async with AsyncPdfRestClient(api_key=VALID_API_KEY, transport=transport) as client: + response = await client.files.create_from_urls( + "https://example.com/report.pdf", + extra_body={"metadata": {"source": "async-test"}}, + ) + + assert len(response) == 1 + assert response[0].id == uploaded_file_id + + +@pytest.mark.asyncio +@pytest.mark.parametrize( + "file_ref", + [ + pytest.param(PdfRestFileID.generate(), id="pdfrest-file-id"), + pytest.param(str(uuid.uuid4()), id="raw-str"), + ], +) +async def test_async_files_get_fetches_info(file_ref: PdfRestFileID | str) -> None: + file_id = str(file_ref) + info_payload = _build_file_info_payload(file_id, "report.pdf") + + def handler(request: httpx.Request) -> httpx.Response: + if request.method == "GET" and request.url.path == f"/resource/{file_id}": + assert request.url.params["format"] == "info" + return httpx.Response(200, json=info_payload) + msg = f"Unexpected request: {request.method} {request.url}" + raise AssertionError(msg) + + transport = httpx.MockTransport(handler) + async with AsyncPdfRestClient(api_key=VALID_API_KEY, transport=transport) as client: + file_repr = await client.files.get(file_ref) + + _assert_file_matches_payload(file_repr, info_payload) + + +@pytest.mark.asyncio +async def test_async_files_get_rejects_invalid_id() -> None: + transport = 
httpx.MockTransport( + lambda request: (_ for _ in ()).throw( + AssertionError("Request should not be sent for invalid IDs.") + ) + ) + + async with AsyncPdfRestClient(api_key=VALID_API_KEY, transport=transport) as client: + with pytest.raises(ValueError, match="Invalid PdfRestPrefixedUUID4"): + await client.files.get("not-a-valid-id") + + +@pytest.mark.asyncio +async def test_async_files_create_from_paths() -> None: + uploaded_ids = [str(uuid.uuid4()), str(uuid.uuid4())] + info_payloads = { + uploaded_ids[0]: _build_file_info_payload(uploaded_ids[0], "report.pdf"), + uploaded_ids[1]: _build_file_info_payload(uploaded_ids[1], "report.docx"), + } + + def handler(request: httpx.Request) -> httpx.Response: + if request.method == "POST" and request.url.path == "/upload": + body = request.content + assert body.count(b'name="file"') == 2 + assert b'filename="report.pdf"' in body + assert b'filename="report.docx"' in body + return httpx.Response( + 200, + json={ + "files": [ + {"name": "report.pdf", "id": uploaded_ids[0]}, + {"name": "report.docx", "id": uploaded_ids[1]}, + ] + }, + ) + if request.method == "GET" and request.url.path.startswith("/resource/"): + file_id = request.url.path.split("/")[-1] + assert request.url.params["format"] == "info" + payload = info_payloads[file_id] + return httpx.Response(200, json=payload) + msg = f"Unexpected request: {request.method} {request.url}" + raise AssertionError(msg) + + transport = httpx.MockTransport(handler) + report_pdf = get_test_resource_path("report.pdf") + report_docx = get_test_resource_path("report.docx") + async with AsyncPdfRestClient(api_key=VALID_API_KEY, transport=transport) as client: + response = await client.files.create_from_paths([report_pdf, report_docx]) + + assert isinstance(response, list) + assert len(response) == 2 + for file_repr in response: + payload = info_payloads[file_repr.id] + _assert_file_matches_payload(file_repr, payload) + + +@pytest.mark.asyncio +async def 
test_async_files_create_from_paths_single_path() -> None: + uploaded_file_id = str(uuid.uuid4()) + info_payload = _build_file_info_payload(uploaded_file_id, "report.pdf") + + def handler(request: httpx.Request) -> httpx.Response: + if request.method == "POST" and request.url.path == "/upload": + body = request.content + assert body.count(b'name="file"') == 1 + assert b'filename="report.pdf"' in body + return httpx.Response( + 200, + json={ + "files": [ + {"name": "report.pdf", "id": uploaded_file_id}, + ] + }, + ) + if request.method == "GET" and request.url.path.startswith("/resource/"): + assert request.url.params["format"] == "info" + return httpx.Response(200, json=info_payload) + msg = f"Unexpected request: {request.method} {request.url}" + raise AssertionError(msg) + + transport = httpx.MockTransport(handler) + report_pdf = get_test_resource_path("report.pdf") + async with AsyncPdfRestClient(api_key=VALID_API_KEY, transport=transport) as client: + response = await client.files.create_from_paths(report_pdf) + + assert len(response) == 1 + _assert_file_matches_payload(response[0], info_payload) + + +def test_live_file_create(pdfrest_api_key: str, pdfrest_live_base_url: str) -> None: + with PdfRestClient( + api_key=pdfrest_api_key, base_url=pdfrest_live_base_url + ) as client: + report_pdf = get_test_resource_path("report.pdf") + with report_pdf.open("rb") as pdf_file: + response = client.files.create([pdf_file]) + assert isinstance(response, list) + assert len(response) == 1 + file_repr = response[0] + assert isinstance(file_repr, PdfRestFile) + assert file_repr.id + assert file_repr.name + + +def test_live_file_create_two_files( + pdfrest_api_key: str, pdfrest_live_base_url: str +) -> None: + with PdfRestClient( + api_key=pdfrest_api_key, base_url=pdfrest_live_base_url + ) as client: + report_pdf = get_test_resource_path("report.pdf") + report_docx = get_test_resource_path("report.docx") + with report_pdf.open("rb") as pdf_file, report_docx.open("rb") as 
docx_file: + response = client.files.create([pdf_file, docx_file]) + assert isinstance(response, list) + assert len(response) == 2 + names = {file_repr.name for file_repr in response} + assert {"report.pdf", "report.docx"} <= names + + +def test_live_file_create_from_paths( + pdfrest_api_key: str, pdfrest_live_base_url: str +) -> None: + with PdfRestClient( + api_key=pdfrest_api_key, base_url=pdfrest_live_base_url + ) as client: + report_pdf = get_test_resource_path("report.pdf") + report_docx = get_test_resource_path("report.docx") + response = client.files.create_from_paths([report_pdf, report_docx]) + assert isinstance(response, list) + assert len(response) == 2 + names = {file_repr.name for file_repr in response} + assert { + "report.pdf", + "report.docx", + } <= names + + +def test_live_file_create_from_urls( + pdfrest_api_key: str, pdfrest_live_base_url: str +) -> None: + with PdfRestClient( + api_key=pdfrest_api_key, base_url=pdfrest_live_base_url + ) as client: + report_pdf = get_test_resource_path("report.pdf") + report_docx = get_test_resource_path("report.docx") + base_files = client.files.create_from_paths([report_pdf, report_docx]) + source_urls = [str(file_repr.url) for file_repr in base_files] + response = client.files.create_from_urls(source_urls) + assert isinstance(response, list) + assert len(response) == 2 + names = {file_repr.name for file_repr in response} + assert {"report.pdf", "report.docx"} <= names + + +class TestLiveFileDownloads: + def test_read_bytes( + self, + pdfrest_api_key: str, + pdfrest_live_base_url: str, + live_sync_file: LiveFileData, + ) -> None: + with PdfRestClient( + api_key=pdfrest_api_key, base_url=pdfrest_live_base_url + ) as client: + assert ( + client.files.read_bytes(live_sync_file.file.id) + == live_sync_file.original_bytes + ) + + def test_read_text( + self, + pdfrest_api_key: str, + pdfrest_live_base_url: str, + live_sync_file: LiveFileData, + ) -> None: + with PdfRestClient( + api_key=pdfrest_api_key, 
base_url=pdfrest_live_base_url + ) as client: + assert ( + client.files.read_text(live_sync_file.file.id) + == live_sync_file.source_text + ) + + def test_write_bytes( + self, + pdfrest_api_key: str, + pdfrest_live_base_url: str, + tmp_path: Path, + live_sync_file: LiveFileData, + ) -> None: + destination = tmp_path / f"{live_sync_file.prefix}-download.bin" + with PdfRestClient( + api_key=pdfrest_api_key, base_url=pdfrest_live_base_url + ) as client: + written_path = client.files.write_bytes( + live_sync_file.file.id, str(destination) + ) + assert written_path == destination + assert written_path.read_bytes() == live_sync_file.original_bytes + + def test_stream_iter_raw( + self, + pdfrest_api_key: str, + pdfrest_live_base_url: str, + live_sync_file: LiveFileData, + ) -> None: + with ExitStack() as stack: + client = stack.enter_context( + PdfRestClient(api_key=pdfrest_api_key, base_url=pdfrest_live_base_url) + ) + stream = stack.enter_context(client.files.stream(live_sync_file.file.id)) + raw_chunks = list(stream.iter_raw()) + assert b"".join(raw_chunks) == live_sync_file.original_bytes + + def test_stream_iter_bytes( + self, + pdfrest_api_key: str, + pdfrest_live_base_url: str, + live_sync_file: LiveFileData, + ) -> None: + with ExitStack() as stack: + client = stack.enter_context( + PdfRestClient(api_key=pdfrest_api_key, base_url=pdfrest_live_base_url) + ) + stream = stack.enter_context(client.files.stream(live_sync_file.file.id)) + chunks = list(stream.iter_bytes(chunk_size=None)) + assert b"".join(chunks) == live_sync_file.original_bytes + + def test_stream_iter_text( + self, + pdfrest_api_key: str, + pdfrest_live_base_url: str, + live_sync_file: LiveFileData, + ) -> None: + with ExitStack() as stack: + client = stack.enter_context( + PdfRestClient(api_key=pdfrest_api_key, base_url=pdfrest_live_base_url) + ) + stream = stack.enter_context(client.files.stream(live_sync_file.file.id)) + text_chunks = list(stream.iter_text(chunk_size=None)) + assert 
"".join(text_chunks) == live_sync_file.source_text + + def test_stream_iter_lines( + self, + pdfrest_api_key: str, + pdfrest_live_base_url: str, + live_sync_file: LiveFileData, + ) -> None: + with ExitStack() as stack: + client = stack.enter_context( + PdfRestClient(api_key=pdfrest_api_key, base_url=pdfrest_live_base_url) + ) + stream = stack.enter_context(client.files.stream(live_sync_file.file.id)) + lines = list(stream.iter_lines()) + assert lines == live_sync_file.source_text.splitlines() + + +@pytest.mark.asyncio +async def test_live_async_file_create( + pdfrest_api_key: str, pdfrest_live_base_url: str +) -> None: + report_pdf = get_test_resource_path("report.pdf") + async with AsyncPdfRestClient( + api_key=pdfrest_api_key, base_url=pdfrest_live_base_url + ) as client: + with report_pdf.open("rb") as pdf_file: + response = await client.files.create([pdf_file]) + assert isinstance(response, list) + assert len(response) == 1 + file_repr = response[0] + assert isinstance(file_repr, PdfRestFile) + assert file_repr.id + assert file_repr.name + + +@pytest.mark.asyncio +async def test_live_async_file_create_from_paths( + pdfrest_api_key: str, pdfrest_live_base_url: str +) -> None: + report_pdf = get_test_resource_path("report.pdf") + report_docx = get_test_resource_path("report.docx") + async with AsyncPdfRestClient( + api_key=pdfrest_api_key, base_url=pdfrest_live_base_url + ) as client: + response = await client.files.create_from_paths([report_pdf, report_docx]) + assert isinstance(response, list) + assert len(response) == 2 + names = {file_repr.name for file_repr in response} + assert { + "report.pdf", + "report.docx", + } <= names + + +class TestLiveAsyncFileDownloads: + @pytest.mark.asyncio + async def test_read_bytes( + self, + pdfrest_api_key: str, + pdfrest_live_base_url: str, + live_async_file: LiveFileData, + ) -> None: + async with AsyncPdfRestClient( + api_key=pdfrest_api_key, base_url=pdfrest_live_base_url + ) as client: + assert ( + await 
client.files.read_bytes(live_async_file.file.id) + == live_async_file.original_bytes + ) + + @pytest.mark.asyncio + async def test_read_text( + self, + pdfrest_api_key: str, + pdfrest_live_base_url: str, + live_async_file: LiveFileData, + ) -> None: + async with AsyncPdfRestClient( + api_key=pdfrest_api_key, base_url=pdfrest_live_base_url + ) as client: + assert ( + await client.files.read_text(live_async_file.file.id) + == live_async_file.source_text + ) + + @pytest.mark.asyncio + async def test_write_bytes( + self, + pdfrest_api_key: str, + pdfrest_live_base_url: str, + tmp_path: Path, + live_async_file: LiveFileData, + ) -> None: + destination = tmp_path / f"{live_async_file.prefix}-download.bin" + async with AsyncPdfRestClient( + api_key=pdfrest_api_key, base_url=pdfrest_live_base_url + ) as client: + written_path = await client.files.write_bytes( + live_async_file.file, destination + ) + assert written_path == destination + assert written_path.read_bytes() == live_async_file.original_bytes + + @pytest.mark.asyncio + async def test_stream_iter_raw( + self, + pdfrest_api_key: str, + pdfrest_live_base_url: str, + live_async_file: LiveFileData, + ) -> None: + async with AsyncExitStack() as stack: + client = await stack.enter_async_context( + AsyncPdfRestClient( + api_key=pdfrest_api_key, base_url=pdfrest_live_base_url + ) + ) + stream_cm = await client.files.stream(live_async_file.file.id) + stream = await stack.enter_async_context(stream_cm) + raw_chunks = [chunk async for chunk in stream.iter_raw()] + assert b"".join(raw_chunks) == live_async_file.original_bytes + + @pytest.mark.asyncio + async def test_stream_iter_bytes( + self, + pdfrest_api_key: str, + pdfrest_live_base_url: str, + live_async_file: LiveFileData, + ) -> None: + async with AsyncExitStack() as stack: + client = await stack.enter_async_context( + AsyncPdfRestClient( + api_key=pdfrest_api_key, base_url=pdfrest_live_base_url + ) + ) + stream_cm = await client.files.stream(live_async_file.file.id) + 
stream = await stack.enter_async_context(stream_cm) + chunks = [chunk async for chunk in stream.iter_bytes(chunk_size=None)] + assert b"".join(chunks) == live_async_file.original_bytes + + @pytest.mark.asyncio + async def test_stream_iter_text( + self, + pdfrest_api_key: str, + pdfrest_live_base_url: str, + live_async_file: LiveFileData, + ) -> None: + async with AsyncExitStack() as stack: + client = await stack.enter_async_context( + AsyncPdfRestClient( + api_key=pdfrest_api_key, base_url=pdfrest_live_base_url + ) + ) + stream_cm = await client.files.stream(live_async_file.file.id) + stream = await stack.enter_async_context(stream_cm) + text_chunks = [chunk async for chunk in stream.iter_text(chunk_size=None)] + assert "".join(text_chunks) == live_async_file.source_text + + @pytest.mark.asyncio + async def test_stream_iter_lines( + self, + pdfrest_api_key: str, + pdfrest_live_base_url: str, + live_async_file: LiveFileData, + ) -> None: + async with AsyncExitStack() as stack: + client = await stack.enter_async_context( + AsyncPdfRestClient( + api_key=pdfrest_api_key, base_url=pdfrest_live_base_url + ) + ) + stream_cm = await client.files.stream(live_async_file.file.id) + stream = await stack.enter_async_context(stream_cm) + lines = [line async for line in stream.iter_lines()] + assert lines == live_async_file.source_text.splitlines() + + +@pytest.mark.asyncio +async def test_live_async_file_create_from_urls( + pdfrest_api_key: str, pdfrest_live_base_url: str +) -> None: + report_pdf = get_test_resource_path("report.pdf") + report_docx = get_test_resource_path("report.docx") + async with AsyncPdfRestClient( + api_key=pdfrest_api_key, base_url=pdfrest_live_base_url + ) as client: + base_files = await client.files.create_from_paths([report_pdf, report_docx]) + source_urls = [str(file_repr.url) for file_repr in base_files] + response = await client.files.create_from_urls(source_urls) + assert isinstance(response, list) + assert len(response) == 2 + names = 
{file_repr.name for file_repr in response} + assert {"report.pdf", "report.docx"} <= names diff --git a/tests/test_graphic_payload_validation.py b/tests/test_graphic_payload_validation.py new file mode 100644 index 00000000..cea6c5a1 --- /dev/null +++ b/tests/test_graphic_payload_validation.py @@ -0,0 +1,189 @@ +from __future__ import annotations + +import re +from typing import Any + +import pytest +from pydantic import ValidationError + +from pdfrest.models._internal import ( + BasePdfRestGraphicPayload, + BmpPdfRestPayload, + GifPdfRestPayload, + JpegPdfRestPayload, + PngPdfRestPayload, + TiffPdfRestPayload, +) + +from .graphics_test_helpers import make_pdf_file + +PAYLOAD_MODELS: tuple[type[BasePdfRestGraphicPayload[Any]], ...] = ( + PngPdfRestPayload, + BmpPdfRestPayload, + GifPdfRestPayload, + JpegPdfRestPayload, + TiffPdfRestPayload, +) + + +@pytest.mark.parametrize("payload_model", PAYLOAD_MODELS) +def test_graphic_payload_accepts_page_range_variants( + payload_model: type[BasePdfRestGraphicPayload[Any]], +) -> None: + payload = payload_model.model_validate( + { + "files": [make_pdf_file("12345678-1234-4abc-8def-1234567890ab")], + "page_range": [1, "last", "6-last"], + } + ) + + data = payload.model_dump(mode="json", by_alias=True, exclude_none=True) + assert data["pages"] == "1,last,6-last" + + +@pytest.mark.parametrize("payload_model", PAYLOAD_MODELS) +@pytest.mark.parametrize( + ("bad_prefix", "expected"), + [ + pytest.param( + ".hidden", + "The output prefix must not start with a `.`.", + id="leading-dot", + ), + pytest.param( + "profile.json", + "The output prefix is a reserved name.", + id="reserved-profile", + ), + pytest.param( + "metadata.json", + "The output prefix is a reserved name.", + id="reserved-metadata", + ), + pytest.param( + "invalid!name", + "The output prefix must not contain special characters: '!'.", + id="special-char", + ), + pytest.param( + "nested/path", + "The output prefix must not contain a directory separator.", + 
id="directory-separator", + ), + ], +) +def test_graphic_payload_invalid_output_prefix( + payload_model: type[BasePdfRestGraphicPayload[Any]], + bad_prefix: str, + expected: str, +) -> None: + with pytest.raises(ValidationError, match=re.escape(expected)): + payload_model.model_validate( + { + "files": [make_pdf_file("12345678-1234-4abc-8def-1234567890ab")], + "output_prefix": bad_prefix, + } + ) + + +@pytest.mark.parametrize("payload_model", PAYLOAD_MODELS) +@pytest.mark.parametrize( + ("bad_page_range", "expected"), + [ + pytest.param( + "0", + "greater than or equal to 1", + id="scalar-zero", + ), + pytest.param( + ["0"], + "greater than or equal to 1", + id="list-zero-string", + ), + pytest.param( + [0], + "greater than or equal to 1", + id="list-zero-int", + ), + pytest.param( + "last-5", + "unable to parse string as an integer", + id="range-last-to-number", + ), + pytest.param( + "3-2", + "less than or equal to the end page", + id="range-descending", + ), + pytest.param( + "foo", + "unable to parse string as an integer", + id="scalar-word", + ), + pytest.param( + ["1", "foo"], + "unable to parse string as an integer", + id="list-mixed", + ), + pytest.param( + "even", + "unable to parse string as an integer", + id="even", + ), + pytest.param( + "odd", + "unable to parse string as an integer", + id="odd", + ), + ], +) +def test_graphic_payload_invalid_page_range_value( + payload_model: type[BasePdfRestGraphicPayload[Any]], + bad_page_range: object, + expected: str, +) -> None: + with pytest.raises(ValidationError, match=re.escape(expected)): + payload_model.model_validate( + { + "files": [make_pdf_file("12345678-1234-4abc-8def-1234567890ab")], + "page_range": bad_page_range, + } + ) + + +@pytest.mark.parametrize("payload_model", PAYLOAD_MODELS) +@pytest.mark.parametrize( + ("page_range", "expected_message"), + [ + pytest.param( + "even", + "unable to parse string as an integer", + id="even", + ), + pytest.param( + "odd", + "unable to parse string as an 
integer", + id="odd", + ), + pytest.param( + "5-3", + "less than or equal to the end page", + id="descending", + ), + ], +) +def test_graphic_payload_rejects_special_ranges( + payload_model: type[BasePdfRestGraphicPayload[Any]], + page_range: str, + expected_message: str, +) -> None: + with pytest.raises( + ValidationError, + match=expected_message, + ): + payload_model.model_validate( + { + "files": [make_pdf_file("12345678-1234-4abc-8def-1234567890ab")], + "page_range": page_range, + } + ) diff --git a/tests/test_merge_pdfs.py b/tests/test_merge_pdfs.py new file mode 100644 index 00000000..64755a9f --- /dev/null +++ b/tests/test_merge_pdfs.py @@ -0,0 +1,155 @@ +from __future__ import annotations + +import json + +import httpx +import pytest +from pydantic import ValidationError + +from pdfrest import AsyncPdfRestClient, PdfRestClient +from pdfrest.models import PdfRestFileBasedResponse, PdfRestFileID +from pdfrest.models._internal import PdfMergePayload +from pdfrest.types import PdfMergeInput + +from .graphics_test_helpers import ( + ASYNC_API_KEY, + VALID_API_KEY, + build_file_info_payload, + make_pdf_file, +) + + +def test_merge_pdfs_success(monkeypatch: pytest.MonkeyPatch) -> None: + monkeypatch.delenv("PDFREST_API_KEY", raising=False) + source_a = make_pdf_file(PdfRestFileID.generate(1), name="a.pdf") + source_b = make_pdf_file(PdfRestFileID.generate(1), name="b.pdf") + source_c = make_pdf_file(PdfRestFileID.generate(1), name="c.pdf") + output_id = str(PdfRestFileID.generate()) + + merge_sources: list[PdfMergeInput] = [ + {"file": source_a, "pages": "even"}, + source_b, + (source_c, ("9-2", "odd")), + ] + + pdf_merge_payload = PdfMergePayload.model_validate( + { + "sources": merge_sources, + "output_prefix": "merged-output", + } + ) + request_payload = pdf_merge_payload.model_dump( + mode="json", by_alias=True, exclude_none=True, exclude_unset=True + ) + + seen: dict[str, int] = {"post": 0, "get": 0} + + def handler(request: httpx.Request) -> httpx.Response: 
+ if request.method == "POST" and request.url.path == "/merged-pdf": + seen["post"] += 1 + payload = json.loads(request.content.decode("utf-8")) + assert payload == request_payload + return httpx.Response( + 200, + json={ + "inputId": [source_a.id, source_b.id, source_c.id], + "outputId": output_id, + }, + ) + if request.method == "GET" and request.url.path == f"/resource/{output_id}": + seen["get"] += 1 + return httpx.Response( + 200, + json=build_file_info_payload( + output_id, "merged-output.pdf", "application/pdf" + ), + ) + msg = f"Unexpected request {request.method} {request.url}" + raise AssertionError(msg) + + transport = httpx.MockTransport(handler) + with PdfRestClient(api_key=VALID_API_KEY, transport=transport) as client: + response = client.merge_pdfs(merge_sources, output_prefix="merged-output") + + assert seen == {"post": 1, "get": 1} + assert isinstance(response, PdfRestFileBasedResponse) + assert response.output_file.name == "merged-output.pdf" + assert response.output_file.type == "application/pdf" + assert len(response.input_ids) == 3 + assert {str(input_id) for input_id in response.input_ids} == { + str(source_a.id), + str(source_b.id), + str(source_c.id), + } + assert response.warning is None + + +def test_merge_pdfs_requires_multiple_sources(monkeypatch: pytest.MonkeyPatch) -> None: + monkeypatch.delenv("PDFREST_API_KEY", raising=False) + single_source = make_pdf_file(PdfRestFileID.generate(1)) + transport = httpx.MockTransport(lambda request: (_ for _ in ()).throw(RuntimeError)) + + with ( + PdfRestClient(api_key=VALID_API_KEY, transport=transport) as client, + pytest.raises(ValidationError, match="at least 2"), + ): + client.merge_pdfs([single_source]) + + +def test_merge_pdfs_invalid_page_range(monkeypatch: pytest.MonkeyPatch) -> None: + monkeypatch.delenv("PDFREST_API_KEY", raising=False) + source_a = make_pdf_file(PdfRestFileID.generate(1)) + source_b = make_pdf_file(PdfRestFileID.generate(1)) + transport = httpx.MockTransport(lambda 
request: (_ for _ in ()).throw(RuntimeError)) + + with ( + PdfRestClient(api_key=VALID_API_KEY, transport=transport) as client, + pytest.raises(ValidationError, match="greater than or equal to 1"), + ): + client.merge_pdfs([source_a, (source_b, 0)]) + + +@pytest.mark.asyncio +async def test_async_merge_pdfs(monkeypatch: pytest.MonkeyPatch) -> None: + monkeypatch.delenv("PDFREST_API_KEY", raising=False) + source_a = make_pdf_file(PdfRestFileID.generate(1)) + source_b = make_pdf_file(PdfRestFileID.generate(1)) + output_id = str(PdfRestFileID.generate()) + + request_payload = PdfMergePayload.model_validate( + { + "sources": [source_a, {"file": source_b, "pages": "2-last"}], + "output_prefix": "async-merge", + } + ).model_dump(mode="json", by_alias=True, exclude_none=True, exclude_unset=True) + + def handler(request: httpx.Request) -> httpx.Response: + if request.method == "POST" and request.url.path == "/merged-pdf": + payload = json.loads(request.content.decode("utf-8")) + assert payload == request_payload + return httpx.Response( + 200, + json={ + "inputId": [source_a.id, source_b.id], + "outputId": output_id, + }, + ) + if request.method == "GET" and request.url.path == f"/resource/{output_id}": + return httpx.Response( + 200, + json=build_file_info_payload( + output_id, "async-merge.pdf", "application/pdf" + ), + ) + msg = f"Unexpected request {request.method} {request.url}" + raise AssertionError(msg) + + transport = httpx.MockTransport(handler) + async with AsyncPdfRestClient(api_key=ASYNC_API_KEY, transport=transport) as client: + response = await client.merge_pdfs( + [source_a, {"file": source_b, "pages": "2-last"}], + output_prefix="async-merge", + ) + + assert isinstance(response, PdfRestFileBasedResponse) + assert response.output_file.name == "async-merge.pdf" diff --git a/tests/test_pdf_redaction_apply.py b/tests/test_pdf_redaction_apply.py new file mode 100644 index 00000000..84fd991c --- /dev/null +++ b/tests/test_pdf_redaction_apply.py @@ -0,0 +1,89 @@ 
+from __future__ import annotations + +import json + +import httpx +import pytest +from pydantic import ValidationError + +from pdfrest import PdfRestClient +from pdfrest.models import PdfRestFileBasedResponse, PdfRestFileID +from pdfrest.models._internal import PdfRedactionApplyPayload +from pdfrest.types import PdfRGBColor + +from .graphics_test_helpers import VALID_API_KEY, build_file_info_payload, make_pdf_file + + +@pytest.mark.parametrize( + "rgb_color", + [ + pytest.param((255, 255, 255), id="tuple"), + pytest.param([10, 20, 30], id="list"), + pytest.param(None, id="none"), + ], +) +def test_apply_redactions_success( + monkeypatch: pytest.MonkeyPatch, + rgb_color: PdfRGBColor | list[int] | None, +) -> None: + monkeypatch.delenv("PDFREST_API_KEY", raising=False) + input_file = make_pdf_file(PdfRestFileID.generate(1)) + output_id = str(PdfRestFileID.generate()) + + payload_data: dict[str, object] = {"files": [input_file]} + if rgb_color is not None: + payload_data["rgb_color"] = rgb_color + payload_data["output"] = "final-output" + + payload_model_dump = PdfRedactionApplyPayload.model_validate( + payload_data + ).model_dump(mode="json", by_alias=True, exclude_none=True, exclude_unset=True) + + def handler(request: httpx.Request) -> httpx.Response: + if ( + request.method == "POST" + and request.url.path == "/pdf-with-redacted-text-applied" + ): + body = json.loads(request.content.decode("utf-8")) + assert body == payload_model_dump + return httpx.Response( + 200, + json={ + "inputId": [input_file.id], + "outputId": [output_id], + }, + ) + if request.method == "GET" and request.url.path == f"/resource/{output_id}": + return httpx.Response( + 200, + json=build_file_info_payload( + output_id, "final-output.pdf", "application/pdf" + ), + ) + msg = f"Unexpected request {request.method} {request.url}" + raise AssertionError(msg) + + transport = httpx.MockTransport(handler) + with PdfRestClient(api_key=VALID_API_KEY, transport=transport) as client: + response = 
client.apply_redactions( + input_file, + rgb_color=rgb_color, + output="final-output", + ) + + assert isinstance(response, PdfRestFileBasedResponse) + assert response.output_files[0].name == "final-output.pdf" + assert response.output_files[0].type == "application/pdf" + + +def test_apply_redactions_invalid_color(monkeypatch: pytest.MonkeyPatch) -> None: + monkeypatch.delenv("PDFREST_API_KEY", raising=False) + input_file = make_pdf_file(PdfRestFileID.generate(1)) + transport = httpx.MockTransport(lambda request: (_ for _ in ()).throw(RuntimeError)) + + with PdfRestClient(api_key=VALID_API_KEY, transport=transport) as client: + with pytest.raises(ValidationError, match="Field required"): + client.apply_redactions(input_file, rgb_color=[255, 255]) + + with pytest.raises(ValidationError, match="greater than or equal to 0"): + client.apply_redactions(input_file, rgb_color=[-1, 0, 0]) diff --git a/tests/test_pdf_redaction_preview.py b/tests/test_pdf_redaction_preview.py new file mode 100644 index 00000000..909d35c4 --- /dev/null +++ b/tests/test_pdf_redaction_preview.py @@ -0,0 +1,125 @@ +from __future__ import annotations + +import json + +import httpx +import pytest +from pydantic import ValidationError + +from pdfrest import PdfRestClient +from pdfrest.models import PdfRestFileBasedResponse, PdfRestFileID +from pdfrest.models._internal import PdfRedactionPreviewPayload +from pdfrest.types import PdfRedactionInstruction + +from .graphics_test_helpers import VALID_API_KEY, build_file_info_payload, make_pdf_file + + +@pytest.mark.parametrize( + "redactions", + [ + pytest.param( + [ + {"type": "literal", "value": "Sensitive"}, + {"type": "preset", "value": "email"}, + ], + id="list", + ), + pytest.param({"type": "regex", "value": "\\d{3}-\\d{2}-\\d{4}"}, id="single"), + ], +) +def test_preview_redactions_success( + monkeypatch: pytest.MonkeyPatch, + redactions: PdfRedactionInstruction | list[PdfRedactionInstruction], +) -> None: + monkeypatch.delenv("PDFREST_API_KEY", 
raising=False) + input_file = make_pdf_file(PdfRestFileID.generate(1)) + output_id = str(PdfRestFileID.generate()) + payload_model_dump = PdfRedactionPreviewPayload.model_validate( + { + "files": [input_file], + "redactions": redactions, + "output": "preview-output", + } + ).model_dump(mode="json", by_alias=True, exclude_none=True, exclude_unset=True) + + def handler(request: httpx.Request) -> httpx.Response: + if ( + request.method == "POST" + and request.url.path == "/pdf-with-redacted-text-preview" + ): + body = json.loads(request.content.decode("utf-8")) + assert body == payload_model_dump + return httpx.Response( + 200, + json={ + "inputId": [input_file.id], + "outputId": [output_id], + }, + ) + if request.method == "GET" and request.url.path == f"/resource/{output_id}": + return httpx.Response( + 200, + json=build_file_info_payload( + output_id, "preview-output.pdf", "application/pdf" + ), + ) + msg = f"Unexpected request {request.method} {request.url}" + raise AssertionError(msg) + + transport = httpx.MockTransport(handler) + with PdfRestClient(api_key=VALID_API_KEY, transport=transport) as client: + response = client.preview_redactions( + input_file, + redactions=redactions, + output="preview-output", + ) + + assert isinstance(response, PdfRestFileBasedResponse) + assert str(response.input_id) == str(input_file.id) + assert response.output_files[0].name == "preview-output.pdf" + assert response.output_files[0].type == "application/pdf" + assert response.warning is None + + +def test_preview_redactions_invalid_preset(monkeypatch: pytest.MonkeyPatch) -> None: + monkeypatch.delenv("PDFREST_API_KEY", raising=False) + input_file = make_pdf_file(PdfRestFileID.generate(1)) + transport = httpx.MockTransport(lambda request: (_ for _ in ()).throw(RuntimeError)) + + with ( + PdfRestClient(api_key=VALID_API_KEY, transport=transport) as client, + pytest.raises(ValidationError, match="Input should be 'email'"), + ): + client.preview_redactions( + input_file, + 
redactions=[{"type": "preset", "value": "unknown"}], + ) + + +def test_preview_redactions_reject_json_string(monkeypatch: pytest.MonkeyPatch) -> None: + monkeypatch.delenv("PDFREST_API_KEY", raising=False) + input_file = make_pdf_file(PdfRestFileID.generate(1)) + transport = httpx.MockTransport(lambda request: (_ for _ in ()).throw(RuntimeError)) + + with ( + PdfRestClient(api_key=VALID_API_KEY, transport=transport) as client, + pytest.raises(ValidationError, match="valid dictionary"), + ): + client.preview_redactions( + input_file, + redactions=json.dumps([{"type": "literal", "value": "secret"}]), # type: ignore[arg-type] + ) + + +def test_preview_redactions_requires_instruction( + monkeypatch: pytest.MonkeyPatch, +) -> None: + monkeypatch.delenv("PDFREST_API_KEY", raising=False) + input_file = make_pdf_file(PdfRestFileID.generate(1)) + transport = httpx.MockTransport(lambda request: (_ for _ in ()).throw(RuntimeError)) + + with ( + PdfRestClient(api_key=VALID_API_KEY, transport=transport) as client, + pytest.raises(ValidationError, match="at least 1 item"), + ): + client.preview_redactions(input_file, redactions=[]) diff --git a/tests/test_query_pdf_info.py b/tests/test_query_pdf_info.py new file mode 100644 index 00000000..8e4fe885 --- /dev/null +++ b/tests/test_query_pdf_info.py @@ -0,0 +1,240 @@ +from __future__ import annotations + +import json +from collections.abc import Sequence + +import httpx +import pytest +from pydantic import ValidationError + +from pdfrest import AsyncPdfRestClient, PdfRestClient +from pdfrest.models import PdfRestFileID, PdfRestInfoResponse +from pdfrest.types import ALL_PDF_INFO_QUERIES, PdfInfoQuery + +from .graphics_test_helpers import ASYNC_API_KEY, VALID_API_KEY, make_pdf_file + + +@pytest.mark.parametrize( + ("queries_input", "expected_serialized"), + [ + pytest.param(["tagged", "page_count"], "tagged,page_count", id="list"), + pytest.param(("tagged", "page_count"), "tagged,page_count", id="tuple"), + pytest.param("tagged", 
"tagged", id="single"), + ], +) +def test_query_pdf_info_success( + monkeypatch: pytest.MonkeyPatch, + queries_input: Sequence[PdfInfoQuery] | PdfInfoQuery, + expected_serialized: str, +) -> None: + monkeypatch.delenv("PDFREST_API_KEY", raising=False) + input_file = make_pdf_file(str(PdfRestFileID.generate())) + + def handler(request: httpx.Request) -> httpx.Response: + if request.method != "POST" or request.url.path != "/pdf-info": + msg = f"Unexpected request {request.method} {request.url}" + raise AssertionError(msg) + payload = json.loads(request.content.decode("utf-8")) + assert payload == { + "id": str(input_file.id), + "queries": expected_serialized, + } + return httpx.Response( + 200, + json={ + "inputId": str(input_file.id), + "page_count": 2, + "title": "Example Document", + "tagged": True, + "allQueriesProcessed": True, + }, + ) + + transport = httpx.MockTransport(handler) + with PdfRestClient(api_key=VALID_API_KEY, transport=transport) as client: + response = client.query_pdf_info(input_file, queries=queries_input) + + assert isinstance(response, PdfRestInfoResponse) + assert response.page_count == 2 + assert response.title == "Example Document" + assert response.tagged is True + assert response.all_queries_processed is True + + +def test_query_pdf_info_default_queries(monkeypatch: pytest.MonkeyPatch) -> None: + monkeypatch.delenv("PDFREST_API_KEY", raising=False) + input_file = make_pdf_file(str(PdfRestFileID.generate())) + expected_serialized = ",".join(ALL_PDF_INFO_QUERIES) + + def handler(request: httpx.Request) -> httpx.Response: + if request.method != "POST" or request.url.path != "/pdf-info": + msg = f"Unexpected request {request.method} {request.url}" + raise AssertionError(msg) + payload = json.loads(request.content.decode("utf-8")) + assert payload == { + "id": str(input_file.id), + "queries": expected_serialized, + } + return httpx.Response( + 200, + json={ + "inputId": str(input_file.id), + "page_count": 1, + "tagged": False, + 
"allQueriesProcessed": True, + }, + ) + + transport = httpx.MockTransport(handler) + with PdfRestClient(api_key=VALID_API_KEY, transport=transport) as client: + response = client.query_pdf_info(input_file) + + assert isinstance(response, PdfRestInfoResponse) + assert response.page_count == 1 + assert response.tagged is False + + +def test_query_pdf_info_request_customization(monkeypatch: pytest.MonkeyPatch) -> None: + monkeypatch.delenv("PDFREST_API_KEY", raising=False) + resource_id = PdfRestFileID.generate() + resource_file = make_pdf_file(str(resource_id)) + captured_timeout: dict[str, float | dict[str, float] | None] = {} + + def handler(request: httpx.Request) -> httpx.Response: + if request.method != "POST" or request.url.path != "/pdf-info": + msg = f"Unexpected request {request.method} {request.url}" + raise AssertionError(msg) + assert request.url.params["trace"] == "true" + assert request.headers["X-Debug"] == "1" + captured_timeout["value"] = request.extensions.get("timeout") + payload = json.loads(request.content.decode("utf-8")) + assert payload == { + "id": str(resource_file.id), + "queries": "tagged", + "debug": True, + } + return httpx.Response( + 200, + json={ + "inputId": str(resource_file.id), + "tagged": False, + "allQueriesProcessed": True, + }, + ) + + transport = httpx.MockTransport(handler) + with PdfRestClient(api_key=VALID_API_KEY, transport=transport) as client: + response = client.query_pdf_info( + resource_file, + queries="tagged", + extra_query={"trace": "true"}, + extra_headers={"X-Debug": "1"}, + extra_body={"debug": True}, + timeout=0.75, + ) + + assert isinstance(response, PdfRestInfoResponse) + assert response.tagged is False + timeout_value = captured_timeout["value"] + assert timeout_value is not None + if isinstance(timeout_value, dict): + assert all( + component == pytest.approx(0.75) for component in timeout_value.values() + ) + else: + assert timeout_value == pytest.approx(0.75) + + +def 
test_query_pdf_info_requires_queries(monkeypatch: pytest.MonkeyPatch) -> None: + monkeypatch.delenv("PDFREST_API_KEY", raising=False) + input_file = make_pdf_file(str(PdfRestFileID.generate())) + transport = httpx.MockTransport( + lambda request: pytest.fail("request should not be sent") + ) + with ( + PdfRestClient(api_key=VALID_API_KEY, transport=transport) as client, + pytest.raises(ValidationError, match="List should have at least 1 item"), + ): + client.query_pdf_info(input_file, queries=[]) + + +def test_query_pdf_info_rejects_invalid_queries( + monkeypatch: pytest.MonkeyPatch, +) -> None: + monkeypatch.delenv("PDFREST_API_KEY", raising=False) + input_file = make_pdf_file(str(PdfRestFileID.generate())) + transport = httpx.MockTransport( + lambda request: pytest.fail("request should not be sent") + ) + with ( + PdfRestClient(api_key=VALID_API_KEY, transport=transport) as client, + pytest.raises( + ValidationError, + match="Input should be 'tagged'", + ), + ): + client.query_pdf_info(input_file, queries=["not_a_real_query"]) # type: ignore[list-item] + + +def test_query_pdf_info_accepts_sequence(monkeypatch: pytest.MonkeyPatch) -> None: + monkeypatch.delenv("PDFREST_API_KEY", raising=False) + file_a = make_pdf_file(str(PdfRestFileID.generate())) + + def handler(request: httpx.Request) -> httpx.Response: + if request.method != "POST" or request.url.path != "/pdf-info": + msg = f"Unexpected request {request.method} {request.url}" + raise AssertionError(msg) + payload = json.loads(request.content.decode("utf-8")) + assert payload == { + "id": str(file_a.id), + "queries": "tagged,page_count", + } + return httpx.Response( + 200, + json={ + "inputId": str(file_a.id), + "tagged": True, + "page_count": 5, + "allQueriesProcessed": True, + }, + ) + + transport = httpx.MockTransport(handler) + with PdfRestClient(api_key=VALID_API_KEY, transport=transport) as client: + response = client.query_pdf_info([file_a], queries=("tagged", "page_count")) + + assert 
isinstance(response, PdfRestInfoResponse) + assert response.page_count == 5 + assert response.tagged is True + + +@pytest.mark.asyncio +async def test_async_query_pdf_info(monkeypatch: pytest.MonkeyPatch) -> None: + monkeypatch.delenv("PDFREST_API_KEY", raising=False) + input_file = make_pdf_file(str(PdfRestFileID.generate())) + + def handler(request: httpx.Request) -> httpx.Response: + if request.method != "POST" or request.url.path != "/pdf-info": + msg = f"Unexpected request {request.method} {request.url}" + raise AssertionError(msg) + payload = json.loads(request.content.decode("utf-8")) + assert payload == { + "id": str(input_file.id), + "queries": "tagged", + } + return httpx.Response( + 200, + json={ + "inputId": str(input_file.id), + "tagged": True, + "allQueriesProcessed": True, + }, + ) + + transport = httpx.MockTransport(handler) + async with AsyncPdfRestClient(api_key=ASYNC_API_KEY, transport=transport) as client: + response = await client.query_pdf_info(input_file, queries="tagged") + + assert isinstance(response, PdfRestInfoResponse) + assert response.tagged is True + assert response.all_queries_processed is True diff --git a/tests/test_split_pdf.py b/tests/test_split_pdf.py new file mode 100644 index 00000000..2afd4c6f --- /dev/null +++ b/tests/test_split_pdf.py @@ -0,0 +1,195 @@ +from __future__ import annotations + +import json + +import httpx +import pytest +from pydantic import ValidationError + +from pdfrest import AsyncPdfRestClient, PdfRestClient +from pdfrest.models import PdfRestFileBasedResponse, PdfRestFileID +from pdfrest.models._internal import PdfSplitPayload + +from .graphics_test_helpers import ( + ASYNC_API_KEY, + VALID_API_KEY, + build_file_info_payload, + make_pdf_file, +) + + +def test_split_pdf_success(monkeypatch: pytest.MonkeyPatch) -> None: + monkeypatch.delenv("PDFREST_API_KEY", raising=False) + input_file = make_pdf_file(PdfRestFileID.generate(1)) + output_ids = [str(PdfRestFileID.generate()) for _ in range(4)] + 
page_groups: list[str | list[int | str]] = [ + ["1", "2-4", 5, "6-last"], + "even", + "9-2", + "odd", + ] + + request_payload = PdfSplitPayload.model_validate( + { + "files": [input_file], + "page_groups": page_groups, + "output_prefix": "split-output", + } + ).model_dump(mode="json", by_alias=True, exclude_none=True, exclude_unset=True) + + seen: dict[str, int] = {"post": 0, "get": 0} + + def handler(request: httpx.Request) -> httpx.Response: + if request.method == "POST" and request.url.path == "/split-pdf": + seen["post"] += 1 + payload = json.loads(request.content.decode("utf-8")) + assert payload == request_payload + return httpx.Response( + 200, + json={ + "inputId": [input_file.id], + "outputId": output_ids, + }, + ) + if request.method == "GET" and request.url.path in { + f"/resource/{identifier}" for identifier in output_ids + }: + seen["get"] += 1 + file_id = request.url.path.split("/")[-1] + index = output_ids.index(file_id) + 1 + name = f"split-output-{index:03d}.pdf" + return httpx.Response( + 200, + json=build_file_info_payload(file_id, name, "application/pdf"), + ) + msg = f"Unexpected request {request.method} {request.url}" + raise AssertionError(msg) + + transport = httpx.MockTransport(handler) + with PdfRestClient(api_key=VALID_API_KEY, transport=transport) as client: + response = client.split_pdf( + input_file, + page_groups=page_groups, + output_prefix="split-output", + ) + + assert seen["post"] == 1 + assert seen["get"] == len(output_ids) + assert isinstance(response, PdfRestFileBasedResponse) + assert len(response.output_files) == len(output_ids) + expected_names = [ + f"split-output-{idx:03d}.pdf" for idx in range(1, len(output_ids) + 1) + ] + assert [output_file.name for output_file in response.output_files] == expected_names + assert [output_file.type for output_file in response.output_files] == [ + "application/pdf" + ] * len(output_ids) + assert str(response.input_id) == str(input_file.id) + assert response.warning is None + + +def 
test_split_pdf_without_page_groups(monkeypatch: pytest.MonkeyPatch) -> None: + monkeypatch.delenv("PDFREST_API_KEY", raising=False) + input_file = make_pdf_file(PdfRestFileID.generate(1)) + output_ids = [str(PdfRestFileID.generate()) for _ in range(3)] + + request_payload = PdfSplitPayload.model_validate( + { + "files": [input_file], + } + ).model_dump(mode="json", by_alias=True, exclude_none=True, exclude_unset=True) + + def handler(request: httpx.Request) -> httpx.Response: + if request.method == "POST" and request.url.path == "/split-pdf": + payload = json.loads(request.content.decode("utf-8")) + assert payload == request_payload + return httpx.Response( + 200, + json={ + "inputId": [input_file.id], + "outputId": output_ids, + }, + ) + if request.method == "GET" and request.url.path in { + f"/resource/{identifier}" for identifier in output_ids + }: + file_id = request.url.path.split("/")[-1] + index = output_ids.index(file_id) + 1 + return httpx.Response( + 200, + json=build_file_info_payload( + file_id, + f"auto-split-{index:03d}.pdf", + "application/pdf", + ), + ) + msg = f"Unexpected request {request.method} {request.url}" + raise AssertionError(msg) + + transport = httpx.MockTransport(handler) + with PdfRestClient(api_key=VALID_API_KEY, transport=transport) as client: + response = client.split_pdf(input_file) + + assert isinstance(response, PdfRestFileBasedResponse) + assert len(response.output_files) == len(output_ids) + assert [file.name for file in response.output_files] == [ + f"auto-split-{idx:03d}.pdf" for idx in range(1, len(output_ids) + 1) + ] + assert all(file.type == "application/pdf" for file in response.output_files) + + +def test_split_pdf_invalid_page_group(monkeypatch: pytest.MonkeyPatch) -> None: + monkeypatch.delenv("PDFREST_API_KEY", raising=False) + input_file = make_pdf_file(PdfRestFileID.generate(1)) + transport = httpx.MockTransport(lambda request: (_ for _ in ()).throw(RuntimeError)) + + with ( + PdfRestClient(api_key=VALID_API_KEY, 
transport=transport) as client, + pytest.raises(ValidationError, match="greater than or equal to 1"), + ): + client.split_pdf(input_file, page_groups=["0"]) + + +@pytest.mark.asyncio +async def test_async_split_pdf(monkeypatch: pytest.MonkeyPatch) -> None: + monkeypatch.delenv("PDFREST_API_KEY", raising=False) + input_file = make_pdf_file(PdfRestFileID.generate(1)) + output_id = str(PdfRestFileID.generate()) + + request_payload = PdfSplitPayload.model_validate( + { + "files": [input_file], + "output_prefix": "async-split", + } + ).model_dump(mode="json", by_alias=True, exclude_none=True, exclude_unset=True) + + def handler(request: httpx.Request) -> httpx.Response: + if request.method == "POST" and request.url.path == "/split-pdf": + payload = json.loads(request.content.decode("utf-8")) + assert payload == request_payload + return httpx.Response( + 200, + json={ + "inputId": [input_file.id], + "outputId": [output_id], + }, + ) + if request.method == "GET" and request.url.path == f"/resource/{output_id}": + return httpx.Response( + 200, + json=build_file_info_payload( + output_id, "async-split-001.pdf", "application/pdf" + ), + ) + msg = f"Unexpected request {request.method} {request.url}" + raise AssertionError(msg) + + transport = httpx.MockTransport(handler) + async with AsyncPdfRestClient(api_key=ASYNC_API_KEY, transport=transport) as client: + response = await client.split_pdf( + input_file, + output_prefix="async-split", + ) + + assert isinstance(response, PdfRestFileBasedResponse) + assert response.output_files[0].name == "async-split-001.pdf" diff --git a/uv.lock b/uv.lock new file mode 100644 index 00000000..1a4b3243 --- /dev/null +++ b/uv.lock @@ -0,0 +1,1284 @@ +version = 1 +revision = 3 +requires-python = ">=3.10" + +[[package]] +name = "annotated-types" +version = "0.7.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/ee/67/531ea369ba64dcff5ec9c3402f9f51bf748cec26dde048a2f973a4eea7f5/annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89", size = 16081, upload-time = "2024-05-20T21:33:25.928Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53", size = 13643, upload-time = "2024-05-20T21:33:24.1Z" }, +] + +[[package]] +name = "anyio" +version = "4.11.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "exceptiongroup", marker = "python_full_version < '3.11'" }, + { name = "idna" }, + { name = "sniffio" }, + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/c6/78/7d432127c41b50bccba979505f272c16cbcadcc33645d5fa3a738110ae75/anyio-4.11.0.tar.gz", hash = "sha256:82a8d0b81e318cc5ce71a5f1f8b5c4e63619620b63141ef8c995fa0db95a57c4", size = 219094, upload-time = "2025-09-23T09:19:12.58Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/15/b3/9b1a8074496371342ec1e796a96f99c82c945a339cd81a8e73de28b4cf9e/anyio-4.11.0-py3-none-any.whl", hash = "sha256:0287e96f4d26d4149305414d4e3bc32f0dcd0862365a4bddea19d7a1ec38c4fc", size = 109097, upload-time = "2025-09-23T09:19:10.601Z" }, +] + +[[package]] +name = "argcomplete" +version = "3.6.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/16/0f/861e168fc813c56a78b35f3c30d91c6757d1fd185af1110f1aec784b35d0/argcomplete-3.6.2.tar.gz", hash = "sha256:d0519b1bc867f5f4f4713c41ad0aba73a4a5f007449716b16f385f2166dc6adf", size = 73403, upload-time = "2025-04-03T04:57:03.52Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/31/da/e42d7a9d8dd33fa775f467e4028a47936da2f01e4b0e561f9ba0d74cb0ca/argcomplete-3.6.2-py3-none-any.whl", hash = "sha256:65b3133a29ad53fb42c48cf5114752c7ab66c1c38544fdf6460f450c09b42591", size = 43708, upload-time = "2025-04-03T04:57:01.591Z" }, +] + +[[package]] +name = "attrs" +version = "25.4.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/6b/5c/685e6633917e101e5dcb62b9dd76946cbb57c26e133bae9e0cd36033c0a9/attrs-25.4.0.tar.gz", hash = "sha256:16d5969b87f0859ef33a48b35d55ac1be6e42ae49d5e853b597db70c35c57e11", size = 934251, upload-time = "2025-10-06T13:54:44.725Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3a/2a/7cc015f5b9f5db42b7d48157e23356022889fc354a2813c15934b7cb5c0e/attrs-25.4.0-py3-none-any.whl", hash = "sha256:adcf7e2a1fb3b36ac48d97835bb6d8ade15b8dcce26aba8bf1d14847b57a3373", size = 67615, upload-time = "2025-10-06T13:54:43.17Z" }, +] + +[[package]] +name = "backports-asyncio-runner" +version = "1.2.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/8e/ff/70dca7d7cb1cbc0edb2c6cc0c38b65cba36cccc491eca64cabd5fe7f8670/backports_asyncio_runner-1.2.0.tar.gz", hash = "sha256:a5aa7b2b7d8f8bfcaa2b57313f70792df84e32a2a746f585213373f900b42162", size = 69893, upload-time = "2025-07-02T02:27:15.685Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a0/59/76ab57e3fe74484f48a53f8e337171b4a2349e506eabe136d7e01d059086/backports_asyncio_runner-1.2.0-py3-none-any.whl", hash = "sha256:0da0a936a8aeb554eccb426dc55af3ba63bcdc69fa1a600b5bb305413a4477b5", size = 12313, upload-time = "2025-07-02T02:27:14.263Z" }, +] + +[[package]] +name = "boolean-py" +version = "5.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/c4/cf/85379f13b76f3a69bca86b60237978af17d6aa0bc5998978c3b8cf05abb2/boolean_py-5.0.tar.gz", hash = 
"sha256:60cbc4bad079753721d32649545505362c754e121570ada4658b852a3a318d95", size = 37047, upload-time = "2025-04-03T10:39:49.734Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e5/ca/78d423b324b8d77900030fa59c4aa9054261ef0925631cd2501dd015b7b7/boolean_py-5.0-py3-none-any.whl", hash = "sha256:ef28a70bd43115208441b53a045d1549e2f0ec6e3d08a9d142cbc41c1938e8d9", size = 26577, upload-time = "2025-04-03T10:39:48.449Z" }, +] + +[[package]] +name = "cachecontrol" +version = "0.14.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "msgpack" }, + { name = "requests" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/58/3a/0cbeb04ea57d2493f3ec5a069a117ab467f85e4a10017c6d854ddcbff104/cachecontrol-0.14.3.tar.gz", hash = "sha256:73e7efec4b06b20d9267b441c1f733664f989fb8688391b670ca812d70795d11", size = 28985, upload-time = "2025-04-30T16:45:06.135Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/81/4c/800b0607b00b3fd20f1087f80ab53d6b4d005515b0f773e4831e37cfa83f/cachecontrol-0.14.3-py3-none-any.whl", hash = "sha256:b35e44a3113f17d2a31c1e6b27b9de6d4405f84ae51baa8c1d3cc5b633010cae", size = 21802, upload-time = "2025-04-30T16:45:03.863Z" }, +] + +[package.optional-dependencies] +filecache = [ + { name = "filelock" }, +] + +[[package]] +name = "certifi" +version = "2025.8.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/dc/67/960ebe6bf230a96cda2e0abcf73af550ec4f090005363542f0765df162e0/certifi-2025.8.3.tar.gz", hash = "sha256:e564105f78ded564e3ae7c923924435e1daa7463faeab5bb932bc53ffae63407", size = 162386, upload-time = "2025-08-03T03:07:47.08Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e5/48/1549795ba7742c948d2ad169c1c8cdbae65bc450d6cd753d124b17c8cd32/certifi-2025.8.3-py3-none-any.whl", hash = "sha256:f6c12493cfb1b06ba2ff328595af9350c65d6644968e5d3a2ffd78699af217a5", size = 161216, upload-time = 
"2025-08-03T03:07:45.777Z" }, +] + +[[package]] +name = "cfgv" +version = "3.4.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/11/74/539e56497d9bd1d484fd863dd69cbbfa653cd2aa27abfe35653494d85e94/cfgv-3.4.0.tar.gz", hash = "sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560", size = 7114, upload-time = "2023-08-12T20:38:17.776Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c5/55/51844dd50c4fc7a33b653bfaba4c2456f06955289ca770a5dbd5fd267374/cfgv-3.4.0-py2.py3-none-any.whl", hash = "sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9", size = 7249, upload-time = "2023-08-12T20:38:16.269Z" }, +] + +[[package]] +name = "charset-normalizer" +version = "3.4.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/83/2d/5fd176ceb9b2fc619e63405525573493ca23441330fcdaee6bef9460e924/charset_normalizer-3.4.3.tar.gz", hash = "sha256:6fce4b8500244f6fcb71465d4a4930d132ba9ab8e71a7859e6a5d59851068d14", size = 122371, upload-time = "2025-08-09T07:57:28.46Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d6/98/f3b8013223728a99b908c9344da3aa04ee6e3fa235f19409033eda92fb78/charset_normalizer-3.4.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:fb7f67a1bfa6e40b438170ebdc8158b78dc465a5a67b6dde178a46987b244a72", size = 207695, upload-time = "2025-08-09T07:55:36.452Z" }, + { url = "https://files.pythonhosted.org/packages/21/40/5188be1e3118c82dcb7c2a5ba101b783822cfb413a0268ed3be0468532de/charset_normalizer-3.4.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:cc9370a2da1ac13f0153780040f465839e6cccb4a1e44810124b4e22483c93fe", size = 147153, upload-time = "2025-08-09T07:55:38.467Z" }, + { url = 
"https://files.pythonhosted.org/packages/37/60/5d0d74bc1e1380f0b72c327948d9c2aca14b46a9efd87604e724260f384c/charset_normalizer-3.4.3-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:07a0eae9e2787b586e129fdcbe1af6997f8d0e5abaa0bc98c0e20e124d67e601", size = 160428, upload-time = "2025-08-09T07:55:40.072Z" }, + { url = "https://files.pythonhosted.org/packages/85/9a/d891f63722d9158688de58d050c59dc3da560ea7f04f4c53e769de5140f5/charset_normalizer-3.4.3-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:74d77e25adda8581ffc1c720f1c81ca082921329452eba58b16233ab1842141c", size = 157627, upload-time = "2025-08-09T07:55:41.706Z" }, + { url = "https://files.pythonhosted.org/packages/65/1a/7425c952944a6521a9cfa7e675343f83fd82085b8af2b1373a2409c683dc/charset_normalizer-3.4.3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d0e909868420b7049dafd3a31d45125b31143eec59235311fc4c57ea26a4acd2", size = 152388, upload-time = "2025-08-09T07:55:43.262Z" }, + { url = "https://files.pythonhosted.org/packages/f0/c9/a2c9c2a355a8594ce2446085e2ec97fd44d323c684ff32042e2a6b718e1d/charset_normalizer-3.4.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:c6f162aabe9a91a309510d74eeb6507fab5fff92337a15acbe77753d88d9dcf0", size = 150077, upload-time = "2025-08-09T07:55:44.903Z" }, + { url = "https://files.pythonhosted.org/packages/3b/38/20a1f44e4851aa1c9105d6e7110c9d020e093dfa5836d712a5f074a12bf7/charset_normalizer-3.4.3-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:4ca4c094de7771a98d7fbd67d9e5dbf1eb73efa4f744a730437d8a3a5cf994f0", size = 161631, upload-time = "2025-08-09T07:55:46.346Z" }, + { url = "https://files.pythonhosted.org/packages/a4/fa/384d2c0f57edad03d7bec3ebefb462090d8905b4ff5a2d2525f3bb711fac/charset_normalizer-3.4.3-cp310-cp310-musllinux_1_2_s390x.whl", hash = 
"sha256:02425242e96bcf29a49711b0ca9f37e451da7c70562bc10e8ed992a5a7a25cc0", size = 159210, upload-time = "2025-08-09T07:55:47.539Z" }, + { url = "https://files.pythonhosted.org/packages/33/9e/eca49d35867ca2db336b6ca27617deed4653b97ebf45dfc21311ce473c37/charset_normalizer-3.4.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:78deba4d8f9590fe4dae384aeff04082510a709957e968753ff3c48399f6f92a", size = 153739, upload-time = "2025-08-09T07:55:48.744Z" }, + { url = "https://files.pythonhosted.org/packages/2a/91/26c3036e62dfe8de8061182d33be5025e2424002125c9500faff74a6735e/charset_normalizer-3.4.3-cp310-cp310-win32.whl", hash = "sha256:d79c198e27580c8e958906f803e63cddb77653731be08851c7df0b1a14a8fc0f", size = 99825, upload-time = "2025-08-09T07:55:50.305Z" }, + { url = "https://files.pythonhosted.org/packages/e2/c6/f05db471f81af1fa01839d44ae2a8bfeec8d2a8b4590f16c4e7393afd323/charset_normalizer-3.4.3-cp310-cp310-win_amd64.whl", hash = "sha256:c6e490913a46fa054e03699c70019ab869e990270597018cef1d8562132c2669", size = 107452, upload-time = "2025-08-09T07:55:51.461Z" }, + { url = "https://files.pythonhosted.org/packages/7f/b5/991245018615474a60965a7c9cd2b4efbaabd16d582a5547c47ee1c7730b/charset_normalizer-3.4.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:b256ee2e749283ef3ddcff51a675ff43798d92d746d1a6e4631bf8c707d22d0b", size = 204483, upload-time = "2025-08-09T07:55:53.12Z" }, + { url = "https://files.pythonhosted.org/packages/c7/2a/ae245c41c06299ec18262825c1569c5d3298fc920e4ddf56ab011b417efd/charset_normalizer-3.4.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:13faeacfe61784e2559e690fc53fa4c5ae97c6fcedb8eb6fb8d0a15b475d2c64", size = 145520, upload-time = "2025-08-09T07:55:54.712Z" }, + { url = "https://files.pythonhosted.org/packages/3a/a4/b3b6c76e7a635748c4421d2b92c7b8f90a432f98bda5082049af37ffc8e3/charset_normalizer-3.4.3-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", 
hash = "sha256:00237675befef519d9af72169d8604a067d92755e84fe76492fef5441db05b91", size = 158876, upload-time = "2025-08-09T07:55:56.024Z" }, + { url = "https://files.pythonhosted.org/packages/e2/e6/63bb0e10f90a8243c5def74b5b105b3bbbfb3e7bb753915fe333fb0c11ea/charset_normalizer-3.4.3-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:585f3b2a80fbd26b048a0be90c5aae8f06605d3c92615911c3a2b03a8a3b796f", size = 156083, upload-time = "2025-08-09T07:55:57.582Z" }, + { url = "https://files.pythonhosted.org/packages/87/df/b7737ff046c974b183ea9aa111b74185ac8c3a326c6262d413bd5a1b8c69/charset_normalizer-3.4.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0e78314bdc32fa80696f72fa16dc61168fda4d6a0c014e0380f9d02f0e5d8a07", size = 150295, upload-time = "2025-08-09T07:55:59.147Z" }, + { url = "https://files.pythonhosted.org/packages/61/f1/190d9977e0084d3f1dc169acd060d479bbbc71b90bf3e7bf7b9927dec3eb/charset_normalizer-3.4.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:96b2b3d1a83ad55310de8c7b4a2d04d9277d5591f40761274856635acc5fcb30", size = 148379, upload-time = "2025-08-09T07:56:00.364Z" }, + { url = "https://files.pythonhosted.org/packages/4c/92/27dbe365d34c68cfe0ca76f1edd70e8705d82b378cb54ebbaeabc2e3029d/charset_normalizer-3.4.3-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:939578d9d8fd4299220161fdd76e86c6a251987476f5243e8864a7844476ba14", size = 160018, upload-time = "2025-08-09T07:56:01.678Z" }, + { url = "https://files.pythonhosted.org/packages/99/04/baae2a1ea1893a01635d475b9261c889a18fd48393634b6270827869fa34/charset_normalizer-3.4.3-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:fd10de089bcdcd1be95a2f73dbe6254798ec1bda9f450d5828c96f93e2536b9c", size = 157430, upload-time = "2025-08-09T07:56:02.87Z" }, + { url = 
"https://files.pythonhosted.org/packages/2f/36/77da9c6a328c54d17b960c89eccacfab8271fdaaa228305330915b88afa9/charset_normalizer-3.4.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1e8ac75d72fa3775e0b7cb7e4629cec13b7514d928d15ef8ea06bca03ef01cae", size = 151600, upload-time = "2025-08-09T07:56:04.089Z" }, + { url = "https://files.pythonhosted.org/packages/64/d4/9eb4ff2c167edbbf08cdd28e19078bf195762e9bd63371689cab5ecd3d0d/charset_normalizer-3.4.3-cp311-cp311-win32.whl", hash = "sha256:6cf8fd4c04756b6b60146d98cd8a77d0cdae0e1ca20329da2ac85eed779b6849", size = 99616, upload-time = "2025-08-09T07:56:05.658Z" }, + { url = "https://files.pythonhosted.org/packages/f4/9c/996a4a028222e7761a96634d1820de8a744ff4327a00ada9c8942033089b/charset_normalizer-3.4.3-cp311-cp311-win_amd64.whl", hash = "sha256:31a9a6f775f9bcd865d88ee350f0ffb0e25936a7f930ca98995c05abf1faf21c", size = 107108, upload-time = "2025-08-09T07:56:07.176Z" }, + { url = "https://files.pythonhosted.org/packages/e9/5e/14c94999e418d9b87682734589404a25854d5f5d0408df68bc15b6ff54bb/charset_normalizer-3.4.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:e28e334d3ff134e88989d90ba04b47d84382a828c061d0d1027b1b12a62b39b1", size = 205655, upload-time = "2025-08-09T07:56:08.475Z" }, + { url = "https://files.pythonhosted.org/packages/7d/a8/c6ec5d389672521f644505a257f50544c074cf5fc292d5390331cd6fc9c3/charset_normalizer-3.4.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0cacf8f7297b0c4fcb74227692ca46b4a5852f8f4f24b3c766dd94a1075c4884", size = 146223, upload-time = "2025-08-09T07:56:09.708Z" }, + { url = "https://files.pythonhosted.org/packages/fc/eb/a2ffb08547f4e1e5415fb69eb7db25932c52a52bed371429648db4d84fb1/charset_normalizer-3.4.3-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c6fd51128a41297f5409deab284fecbe5305ebd7e5a1f959bee1c054622b7018", size = 159366, upload-time = "2025-08-09T07:56:11.326Z" }, + 
{ url = "https://files.pythonhosted.org/packages/82/10/0fd19f20c624b278dddaf83b8464dcddc2456cb4b02bb902a6da126b87a1/charset_normalizer-3.4.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:3cfb2aad70f2c6debfbcb717f23b7eb55febc0bb23dcffc0f076009da10c6392", size = 157104, upload-time = "2025-08-09T07:56:13.014Z" }, + { url = "https://files.pythonhosted.org/packages/16/ab/0233c3231af734f5dfcf0844aa9582d5a1466c985bbed6cedab85af9bfe3/charset_normalizer-3.4.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1606f4a55c0fd363d754049cdf400175ee96c992b1f8018b993941f221221c5f", size = 151830, upload-time = "2025-08-09T07:56:14.428Z" }, + { url = "https://files.pythonhosted.org/packages/ae/02/e29e22b4e02839a0e4a06557b1999d0a47db3567e82989b5bb21f3fbbd9f/charset_normalizer-3.4.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:027b776c26d38b7f15b26a5da1044f376455fb3766df8fc38563b4efbc515154", size = 148854, upload-time = "2025-08-09T07:56:16.051Z" }, + { url = "https://files.pythonhosted.org/packages/05/6b/e2539a0a4be302b481e8cafb5af8792da8093b486885a1ae4d15d452bcec/charset_normalizer-3.4.3-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:42e5088973e56e31e4fa58eb6bd709e42fc03799c11c42929592889a2e54c491", size = 160670, upload-time = "2025-08-09T07:56:17.314Z" }, + { url = "https://files.pythonhosted.org/packages/31/e7/883ee5676a2ef217a40ce0bffcc3d0dfbf9e64cbcfbdf822c52981c3304b/charset_normalizer-3.4.3-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:cc34f233c9e71701040d772aa7490318673aa7164a0efe3172b2981218c26d93", size = 158501, upload-time = "2025-08-09T07:56:18.641Z" }, + { url = "https://files.pythonhosted.org/packages/c1/35/6525b21aa0db614cf8b5792d232021dca3df7f90a1944db934efa5d20bb1/charset_normalizer-3.4.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:320e8e66157cc4e247d9ddca8e21f427efc7a04bbd0ac8a9faf56583fa543f9f", size = 153173, upload-time = 
"2025-08-09T07:56:20.289Z" }, + { url = "https://files.pythonhosted.org/packages/50/ee/f4704bad8201de513fdc8aac1cabc87e38c5818c93857140e06e772b5892/charset_normalizer-3.4.3-cp312-cp312-win32.whl", hash = "sha256:fb6fecfd65564f208cbf0fba07f107fb661bcd1a7c389edbced3f7a493f70e37", size = 99822, upload-time = "2025-08-09T07:56:21.551Z" }, + { url = "https://files.pythonhosted.org/packages/39/f5/3b3836ca6064d0992c58c7561c6b6eee1b3892e9665d650c803bd5614522/charset_normalizer-3.4.3-cp312-cp312-win_amd64.whl", hash = "sha256:86df271bf921c2ee3818f0522e9a5b8092ca2ad8b065ece5d7d9d0e9f4849bcc", size = 107543, upload-time = "2025-08-09T07:56:23.115Z" }, + { url = "https://files.pythonhosted.org/packages/65/ca/2135ac97709b400c7654b4b764daf5c5567c2da45a30cdd20f9eefe2d658/charset_normalizer-3.4.3-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:14c2a87c65b351109f6abfc424cab3927b3bdece6f706e4d12faaf3d52ee5efe", size = 205326, upload-time = "2025-08-09T07:56:24.721Z" }, + { url = "https://files.pythonhosted.org/packages/71/11/98a04c3c97dd34e49c7d247083af03645ca3730809a5509443f3c37f7c99/charset_normalizer-3.4.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:41d1fc408ff5fdfb910200ec0e74abc40387bccb3252f3f27c0676731df2b2c8", size = 146008, upload-time = "2025-08-09T07:56:26.004Z" }, + { url = "https://files.pythonhosted.org/packages/60/f5/4659a4cb3c4ec146bec80c32d8bb16033752574c20b1252ee842a95d1a1e/charset_normalizer-3.4.3-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:1bb60174149316da1c35fa5233681f7c0f9f514509b8e399ab70fea5f17e45c9", size = 159196, upload-time = "2025-08-09T07:56:27.25Z" }, + { url = "https://files.pythonhosted.org/packages/86/9e/f552f7a00611f168b9a5865a1414179b2c6de8235a4fa40189f6f79a1753/charset_normalizer-3.4.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = 
"sha256:30d006f98569de3459c2fc1f2acde170b7b2bd265dc1943e87e1a4efe1b67c31", size = 156819, upload-time = "2025-08-09T07:56:28.515Z" }, + { url = "https://files.pythonhosted.org/packages/7e/95/42aa2156235cbc8fa61208aded06ef46111c4d3f0de233107b3f38631803/charset_normalizer-3.4.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:416175faf02e4b0810f1f38bcb54682878a4af94059a1cd63b8747244420801f", size = 151350, upload-time = "2025-08-09T07:56:29.716Z" }, + { url = "https://files.pythonhosted.org/packages/c2/a9/3865b02c56f300a6f94fc631ef54f0a8a29da74fb45a773dfd3dcd380af7/charset_normalizer-3.4.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:6aab0f181c486f973bc7262a97f5aca3ee7e1437011ef0c2ec04b5a11d16c927", size = 148644, upload-time = "2025-08-09T07:56:30.984Z" }, + { url = "https://files.pythonhosted.org/packages/77/d9/cbcf1a2a5c7d7856f11e7ac2d782aec12bdfea60d104e60e0aa1c97849dc/charset_normalizer-3.4.3-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:fdabf8315679312cfa71302f9bd509ded4f2f263fb5b765cf1433b39106c3cc9", size = 160468, upload-time = "2025-08-09T07:56:32.252Z" }, + { url = "https://files.pythonhosted.org/packages/f6/42/6f45efee8697b89fda4d50580f292b8f7f9306cb2971d4b53f8914e4d890/charset_normalizer-3.4.3-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:bd28b817ea8c70215401f657edef3a8aa83c29d447fb0b622c35403780ba11d5", size = 158187, upload-time = "2025-08-09T07:56:33.481Z" }, + { url = "https://files.pythonhosted.org/packages/70/99/f1c3bdcfaa9c45b3ce96f70b14f070411366fa19549c1d4832c935d8e2c3/charset_normalizer-3.4.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:18343b2d246dc6761a249ba1fb13f9ee9a2bcd95decc767319506056ea4ad4dc", size = 152699, upload-time = "2025-08-09T07:56:34.739Z" }, + { url = "https://files.pythonhosted.org/packages/a3/ad/b0081f2f99a4b194bcbb1934ef3b12aa4d9702ced80a37026b7607c72e58/charset_normalizer-3.4.3-cp313-cp313-win32.whl", hash = 
"sha256:6fb70de56f1859a3f71261cbe41005f56a7842cc348d3aeb26237560bfa5e0ce", size = 99580, upload-time = "2025-08-09T07:56:35.981Z" }, + { url = "https://files.pythonhosted.org/packages/9a/8f/ae790790c7b64f925e5c953b924aaa42a243fb778fed9e41f147b2a5715a/charset_normalizer-3.4.3-cp313-cp313-win_amd64.whl", hash = "sha256:cf1ebb7d78e1ad8ec2a8c4732c7be2e736f6e5123a4146c5b89c9d1f585f8cef", size = 107366, upload-time = "2025-08-09T07:56:37.339Z" }, + { url = "https://files.pythonhosted.org/packages/8e/91/b5a06ad970ddc7a0e513112d40113e834638f4ca1120eb727a249fb2715e/charset_normalizer-3.4.3-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:3cd35b7e8aedeb9e34c41385fda4f73ba609e561faedfae0a9e75e44ac558a15", size = 204342, upload-time = "2025-08-09T07:56:38.687Z" }, + { url = "https://files.pythonhosted.org/packages/ce/ec/1edc30a377f0a02689342f214455c3f6c2fbedd896a1d2f856c002fc3062/charset_normalizer-3.4.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b89bc04de1d83006373429975f8ef9e7932534b8cc9ca582e4db7d20d91816db", size = 145995, upload-time = "2025-08-09T07:56:40.048Z" }, + { url = "https://files.pythonhosted.org/packages/17/e5/5e67ab85e6d22b04641acb5399c8684f4d37caf7558a53859f0283a650e9/charset_normalizer-3.4.3-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:2001a39612b241dae17b4687898843f254f8748b796a2e16f1051a17078d991d", size = 158640, upload-time = "2025-08-09T07:56:41.311Z" }, + { url = "https://files.pythonhosted.org/packages/f1/e5/38421987f6c697ee3722981289d554957c4be652f963d71c5e46a262e135/charset_normalizer-3.4.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:8dcfc373f888e4fb39a7bc57e93e3b845e7f462dacc008d9749568b1c4ece096", size = 156636, upload-time = "2025-08-09T07:56:43.195Z" }, + { url = 
"https://files.pythonhosted.org/packages/a0/e4/5a075de8daa3ec0745a9a3b54467e0c2967daaaf2cec04c845f73493e9a1/charset_normalizer-3.4.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:18b97b8404387b96cdbd30ad660f6407799126d26a39ca65729162fd810a99aa", size = 150939, upload-time = "2025-08-09T07:56:44.819Z" }, + { url = "https://files.pythonhosted.org/packages/02/f7/3611b32318b30974131db62b4043f335861d4d9b49adc6d57c1149cc49d4/charset_normalizer-3.4.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:ccf600859c183d70eb47e05a44cd80a4ce77394d1ac0f79dbd2dd90a69a3a049", size = 148580, upload-time = "2025-08-09T07:56:46.684Z" }, + { url = "https://files.pythonhosted.org/packages/7e/61/19b36f4bd67f2793ab6a99b979b4e4f3d8fc754cbdffb805335df4337126/charset_normalizer-3.4.3-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:53cd68b185d98dde4ad8990e56a58dea83a4162161b1ea9272e5c9182ce415e0", size = 159870, upload-time = "2025-08-09T07:56:47.941Z" }, + { url = "https://files.pythonhosted.org/packages/06/57/84722eefdd338c04cf3030ada66889298eaedf3e7a30a624201e0cbe424a/charset_normalizer-3.4.3-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:30a96e1e1f865f78b030d65241c1ee850cdf422d869e9028e2fc1d5e4db73b92", size = 157797, upload-time = "2025-08-09T07:56:49.756Z" }, + { url = "https://files.pythonhosted.org/packages/72/2a/aff5dd112b2f14bcc3462c312dce5445806bfc8ab3a7328555da95330e4b/charset_normalizer-3.4.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:d716a916938e03231e86e43782ca7878fb602a125a91e7acb8b5112e2e96ac16", size = 152224, upload-time = "2025-08-09T07:56:51.369Z" }, + { url = "https://files.pythonhosted.org/packages/b7/8c/9839225320046ed279c6e839d51f028342eb77c91c89b8ef2549f951f3ec/charset_normalizer-3.4.3-cp314-cp314-win32.whl", hash = "sha256:c6dbd0ccdda3a2ba7c2ecd9d77b37f3b5831687d8dc1b6ca5f56a4880cc7b7ce", size = 100086, upload-time = "2025-08-09T07:56:52.722Z" }, + { url = 
"https://files.pythonhosted.org/packages/ee/7a/36fbcf646e41f710ce0a563c1c9a343c6edf9be80786edeb15b6f62e17db/charset_normalizer-3.4.3-cp314-cp314-win_amd64.whl", hash = "sha256:73dc19b562516fc9bcf6e5d6e596df0b4eb98d87e4f79f3ae71840e6ed21361c", size = 107400, upload-time = "2025-08-09T07:56:55.172Z" }, + { url = "https://files.pythonhosted.org/packages/8a/1f/f041989e93b001bc4e44bb1669ccdcf54d3f00e628229a85b08d330615c5/charset_normalizer-3.4.3-py3-none-any.whl", hash = "sha256:ce571ab16d890d23b5c278547ba694193a45011ff86a9162a71307ed9f86759a", size = 53175, upload-time = "2025-08-09T07:57:26.864Z" }, +] + +[[package]] +name = "colorama" +version = "0.4.6" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697, upload-time = "2022-10-25T02:36:22.414Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335, upload-time = "2022-10-25T02:36:20.889Z" }, +] + +[[package]] +name = "colorlog" +version = "6.9.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/d3/7a/359f4d5df2353f26172b3cc39ea32daa39af8de522205f512f458923e677/colorlog-6.9.0.tar.gz", hash = "sha256:bfba54a1b93b94f54e1f4fe48395725a3d92fd2a4af702f6bd70946bdc0c6ac2", size = 16624, upload-time = "2024-10-29T18:34:51.011Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e3/51/9b208e85196941db2f0654ad0357ca6388ab3ed67efdbfc799f35d1f83aa/colorlog-6.9.0-py3-none-any.whl", hash = 
"sha256:5906e71acd67cb07a71e779c47c4bcb45fb8c2993eebe9e5adcd6a6f1b283eff", size = 11424, upload-time = "2024-10-29T18:34:49.815Z" }, +] + +[[package]] +name = "coverage" +version = "7.10.7" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/51/26/d22c300112504f5f9a9fd2297ce33c35f3d353e4aeb987c8419453b2a7c2/coverage-7.10.7.tar.gz", hash = "sha256:f4ab143ab113be368a3e9b795f9cd7906c5ef407d6173fe9675a902e1fffc239", size = 827704, upload-time = "2025-09-21T20:03:56.815Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e5/6c/3a3f7a46888e69d18abe3ccc6fe4cb16cccb1e6a2f99698931dafca489e6/coverage-7.10.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:fc04cc7a3db33664e0c2d10eb8990ff6b3536f6842c9590ae8da4c614b9ed05a", size = 217987, upload-time = "2025-09-21T20:00:57.218Z" }, + { url = "https://files.pythonhosted.org/packages/03/94/952d30f180b1a916c11a56f5c22d3535e943aa22430e9e3322447e520e1c/coverage-7.10.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e201e015644e207139f7e2351980feb7040e6f4b2c2978892f3e3789d1c125e5", size = 218388, upload-time = "2025-09-21T20:01:00.081Z" }, + { url = "https://files.pythonhosted.org/packages/50/2b/9e0cf8ded1e114bcd8b2fd42792b57f1c4e9e4ea1824cde2af93a67305be/coverage-7.10.7-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:240af60539987ced2c399809bd34f7c78e8abe0736af91c3d7d0e795df633d17", size = 245148, upload-time = "2025-09-21T20:01:01.768Z" }, + { url = "https://files.pythonhosted.org/packages/19/20/d0384ac06a6f908783d9b6aa6135e41b093971499ec488e47279f5b846e6/coverage-7.10.7-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:8421e088bc051361b01c4b3a50fd39a4b9133079a2229978d9d30511fd05231b", size = 246958, upload-time = "2025-09-21T20:01:03.355Z" }, + { url = 
"https://files.pythonhosted.org/packages/60/83/5c283cff3d41285f8eab897651585db908a909c572bdc014bcfaf8a8b6ae/coverage-7.10.7-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6be8ed3039ae7f7ac5ce058c308484787c86e8437e72b30bf5e88b8ea10f3c87", size = 248819, upload-time = "2025-09-21T20:01:04.968Z" }, + { url = "https://files.pythonhosted.org/packages/60/22/02eb98fdc5ff79f423e990d877693e5310ae1eab6cb20ae0b0b9ac45b23b/coverage-7.10.7-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e28299d9f2e889e6d51b1f043f58d5f997c373cc12e6403b90df95b8b047c13e", size = 245754, upload-time = "2025-09-21T20:01:06.321Z" }, + { url = "https://files.pythonhosted.org/packages/b4/bc/25c83bcf3ad141b32cd7dc45485ef3c01a776ca3aa8ef0a93e77e8b5bc43/coverage-7.10.7-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:c4e16bd7761c5e454f4efd36f345286d6f7c5fa111623c355691e2755cae3b9e", size = 246860, upload-time = "2025-09-21T20:01:07.605Z" }, + { url = "https://files.pythonhosted.org/packages/3c/b7/95574702888b58c0928a6e982038c596f9c34d52c5e5107f1eef729399b5/coverage-7.10.7-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:b1c81d0e5e160651879755c9c675b974276f135558cf4ba79fee7b8413a515df", size = 244877, upload-time = "2025-09-21T20:01:08.829Z" }, + { url = "https://files.pythonhosted.org/packages/47/b6/40095c185f235e085df0e0b158f6bd68cc6e1d80ba6c7721dc81d97ec318/coverage-7.10.7-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:606cc265adc9aaedcc84f1f064f0e8736bc45814f15a357e30fca7ecc01504e0", size = 245108, upload-time = "2025-09-21T20:01:10.527Z" }, + { url = "https://files.pythonhosted.org/packages/c8/50/4aea0556da7a4b93ec9168420d170b55e2eb50ae21b25062513d020c6861/coverage-7.10.7-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:10b24412692df990dbc34f8fb1b6b13d236ace9dfdd68df5b28c2e39cafbba13", size = 245752, upload-time = "2025-09-21T20:01:11.857Z" }, + { url = 
"https://files.pythonhosted.org/packages/6a/28/ea1a84a60828177ae3b100cb6723838523369a44ec5742313ed7db3da160/coverage-7.10.7-cp310-cp310-win32.whl", hash = "sha256:b51dcd060f18c19290d9b8a9dd1e0181538df2ce0717f562fff6cf74d9fc0b5b", size = 220497, upload-time = "2025-09-21T20:01:13.459Z" }, + { url = "https://files.pythonhosted.org/packages/fc/1a/a81d46bbeb3c3fd97b9602ebaa411e076219a150489bcc2c025f151bd52d/coverage-7.10.7-cp310-cp310-win_amd64.whl", hash = "sha256:3a622ac801b17198020f09af3eaf45666b344a0d69fc2a6ffe2ea83aeef1d807", size = 221392, upload-time = "2025-09-21T20:01:14.722Z" }, + { url = "https://files.pythonhosted.org/packages/d2/5d/c1a17867b0456f2e9ce2d8d4708a4c3a089947d0bec9c66cdf60c9e7739f/coverage-7.10.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a609f9c93113be646f44c2a0256d6ea375ad047005d7f57a5c15f614dc1b2f59", size = 218102, upload-time = "2025-09-21T20:01:16.089Z" }, + { url = "https://files.pythonhosted.org/packages/54/f0/514dcf4b4e3698b9a9077f084429681bf3aad2b4a72578f89d7f643eb506/coverage-7.10.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:65646bb0359386e07639c367a22cf9b5bf6304e8630b565d0626e2bdf329227a", size = 218505, upload-time = "2025-09-21T20:01:17.788Z" }, + { url = "https://files.pythonhosted.org/packages/20/f6/9626b81d17e2a4b25c63ac1b425ff307ecdeef03d67c9a147673ae40dc36/coverage-7.10.7-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:5f33166f0dfcce728191f520bd2692914ec70fac2713f6bf3ce59c3deacb4699", size = 248898, upload-time = "2025-09-21T20:01:19.488Z" }, + { url = "https://files.pythonhosted.org/packages/b0/ef/bd8e719c2f7417ba03239052e099b76ea1130ac0cbb183ee1fcaa58aaff3/coverage-7.10.7-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:35f5e3f9e455bb17831876048355dca0f758b6df22f49258cb5a91da23ef437d", size = 250831, upload-time = "2025-09-21T20:01:20.817Z" }, + { url = 
"https://files.pythonhosted.org/packages/a5/b6/bf054de41ec948b151ae2b79a55c107f5760979538f5fb80c195f2517718/coverage-7.10.7-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4da86b6d62a496e908ac2898243920c7992499c1712ff7c2b6d837cc69d9467e", size = 252937, upload-time = "2025-09-21T20:01:22.171Z" }, + { url = "https://files.pythonhosted.org/packages/0f/e5/3860756aa6f9318227443c6ce4ed7bf9e70bb7f1447a0353f45ac5c7974b/coverage-7.10.7-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:6b8b09c1fad947c84bbbc95eca841350fad9cbfa5a2d7ca88ac9f8d836c92e23", size = 249021, upload-time = "2025-09-21T20:01:23.907Z" }, + { url = "https://files.pythonhosted.org/packages/26/0f/bd08bd042854f7fd07b45808927ebcce99a7ed0f2f412d11629883517ac2/coverage-7.10.7-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:4376538f36b533b46f8971d3a3e63464f2c7905c9800db97361c43a2b14792ab", size = 250626, upload-time = "2025-09-21T20:01:25.721Z" }, + { url = "https://files.pythonhosted.org/packages/8e/a7/4777b14de4abcc2e80c6b1d430f5d51eb18ed1d75fca56cbce5f2db9b36e/coverage-7.10.7-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:121da30abb574f6ce6ae09840dae322bef734480ceafe410117627aa54f76d82", size = 248682, upload-time = "2025-09-21T20:01:27.105Z" }, + { url = "https://files.pythonhosted.org/packages/34/72/17d082b00b53cd45679bad682fac058b87f011fd8b9fe31d77f5f8d3a4e4/coverage-7.10.7-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:88127d40df529336a9836870436fc2751c339fbaed3a836d42c93f3e4bd1d0a2", size = 248402, upload-time = "2025-09-21T20:01:28.629Z" }, + { url = "https://files.pythonhosted.org/packages/81/7a/92367572eb5bdd6a84bfa278cc7e97db192f9f45b28c94a9ca1a921c3577/coverage-7.10.7-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ba58bbcd1b72f136080c0bccc2400d66cc6115f3f906c499013d065ac33a4b61", size = 249320, upload-time = "2025-09-21T20:01:30.004Z" }, + { url = 
"https://files.pythonhosted.org/packages/2f/88/a23cc185f6a805dfc4fdf14a94016835eeb85e22ac3a0e66d5e89acd6462/coverage-7.10.7-cp311-cp311-win32.whl", hash = "sha256:972b9e3a4094b053a4e46832b4bc829fc8a8d347160eb39d03f1690316a99c14", size = 220536, upload-time = "2025-09-21T20:01:32.184Z" }, + { url = "https://files.pythonhosted.org/packages/fe/ef/0b510a399dfca17cec7bc2f05ad8bd78cf55f15c8bc9a73ab20c5c913c2e/coverage-7.10.7-cp311-cp311-win_amd64.whl", hash = "sha256:a7b55a944a7f43892e28ad4bc0561dfd5f0d73e605d1aa5c3c976b52aea121d2", size = 221425, upload-time = "2025-09-21T20:01:33.557Z" }, + { url = "https://files.pythonhosted.org/packages/51/7f/023657f301a276e4ba1850f82749bc136f5a7e8768060c2e5d9744a22951/coverage-7.10.7-cp311-cp311-win_arm64.whl", hash = "sha256:736f227fb490f03c6488f9b6d45855f8e0fd749c007f9303ad30efab0e73c05a", size = 220103, upload-time = "2025-09-21T20:01:34.929Z" }, + { url = "https://files.pythonhosted.org/packages/13/e4/eb12450f71b542a53972d19117ea5a5cea1cab3ac9e31b0b5d498df1bd5a/coverage-7.10.7-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7bb3b9ddb87ef7725056572368040c32775036472d5a033679d1fa6c8dc08417", size = 218290, upload-time = "2025-09-21T20:01:36.455Z" }, + { url = "https://files.pythonhosted.org/packages/37/66/593f9be12fc19fb36711f19a5371af79a718537204d16ea1d36f16bd78d2/coverage-7.10.7-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:18afb24843cbc175687225cab1138c95d262337f5473512010e46831aa0c2973", size = 218515, upload-time = "2025-09-21T20:01:37.982Z" }, + { url = "https://files.pythonhosted.org/packages/66/80/4c49f7ae09cafdacc73fbc30949ffe77359635c168f4e9ff33c9ebb07838/coverage-7.10.7-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:399a0b6347bcd3822be369392932884b8216d0944049ae22925631a9b3d4ba4c", size = 250020, upload-time = "2025-09-21T20:01:39.617Z" }, + { url = 
"https://files.pythonhosted.org/packages/a6/90/a64aaacab3b37a17aaedd83e8000142561a29eb262cede42d94a67f7556b/coverage-7.10.7-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:314f2c326ded3f4b09be11bc282eb2fc861184bc95748ae67b360ac962770be7", size = 252769, upload-time = "2025-09-21T20:01:41.341Z" }, + { url = "https://files.pythonhosted.org/packages/98/2e/2dda59afd6103b342e096f246ebc5f87a3363b5412609946c120f4e7750d/coverage-7.10.7-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c41e71c9cfb854789dee6fc51e46743a6d138b1803fab6cb860af43265b42ea6", size = 253901, upload-time = "2025-09-21T20:01:43.042Z" }, + { url = "https://files.pythonhosted.org/packages/53/dc/8d8119c9051d50f3119bb4a75f29f1e4a6ab9415cd1fa8bf22fcc3fb3b5f/coverage-7.10.7-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:bc01f57ca26269c2c706e838f6422e2a8788e41b3e3c65e2f41148212e57cd59", size = 250413, upload-time = "2025-09-21T20:01:44.469Z" }, + { url = "https://files.pythonhosted.org/packages/98/b3/edaff9c5d79ee4d4b6d3fe046f2b1d799850425695b789d491a64225d493/coverage-7.10.7-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a6442c59a8ac8b85812ce33bc4d05bde3fb22321fa8294e2a5b487c3505f611b", size = 251820, upload-time = "2025-09-21T20:01:45.915Z" }, + { url = "https://files.pythonhosted.org/packages/11/25/9a0728564bb05863f7e513e5a594fe5ffef091b325437f5430e8cfb0d530/coverage-7.10.7-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:78a384e49f46b80fb4c901d52d92abe098e78768ed829c673fbb53c498bef73a", size = 249941, upload-time = "2025-09-21T20:01:47.296Z" }, + { url = "https://files.pythonhosted.org/packages/e0/fd/ca2650443bfbef5b0e74373aac4df67b08180d2f184b482c41499668e258/coverage-7.10.7-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:5e1e9802121405ede4b0133aa4340ad8186a1d2526de5b7c3eca519db7bb89fb", size = 249519, upload-time = "2025-09-21T20:01:48.73Z" }, + { url = 
"https://files.pythonhosted.org/packages/24/79/f692f125fb4299b6f963b0745124998ebb8e73ecdfce4ceceb06a8c6bec5/coverage-7.10.7-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:d41213ea25a86f69efd1575073d34ea11aabe075604ddf3d148ecfec9e1e96a1", size = 251375, upload-time = "2025-09-21T20:01:50.529Z" }, + { url = "https://files.pythonhosted.org/packages/5e/75/61b9bbd6c7d24d896bfeec57acba78e0f8deac68e6baf2d4804f7aae1f88/coverage-7.10.7-cp312-cp312-win32.whl", hash = "sha256:77eb4c747061a6af8d0f7bdb31f1e108d172762ef579166ec84542f711d90256", size = 220699, upload-time = "2025-09-21T20:01:51.941Z" }, + { url = "https://files.pythonhosted.org/packages/ca/f3/3bf7905288b45b075918d372498f1cf845b5b579b723c8fd17168018d5f5/coverage-7.10.7-cp312-cp312-win_amd64.whl", hash = "sha256:f51328ffe987aecf6d09f3cd9d979face89a617eacdaea43e7b3080777f647ba", size = 221512, upload-time = "2025-09-21T20:01:53.481Z" }, + { url = "https://files.pythonhosted.org/packages/5c/44/3e32dbe933979d05cf2dac5e697c8599cfe038aaf51223ab901e208d5a62/coverage-7.10.7-cp312-cp312-win_arm64.whl", hash = "sha256:bda5e34f8a75721c96085903c6f2197dc398c20ffd98df33f866a9c8fd95f4bf", size = 220147, upload-time = "2025-09-21T20:01:55.2Z" }, + { url = "https://files.pythonhosted.org/packages/9a/94/b765c1abcb613d103b64fcf10395f54d69b0ef8be6a0dd9c524384892cc7/coverage-7.10.7-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:981a651f543f2854abd3b5fcb3263aac581b18209be49863ba575de6edf4c14d", size = 218320, upload-time = "2025-09-21T20:01:56.629Z" }, + { url = "https://files.pythonhosted.org/packages/72/4f/732fff31c119bb73b35236dd333030f32c4bfe909f445b423e6c7594f9a2/coverage-7.10.7-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:73ab1601f84dc804f7812dc297e93cd99381162da39c47040a827d4e8dafe63b", size = 218575, upload-time = "2025-09-21T20:01:58.203Z" }, + { url = 
"https://files.pythonhosted.org/packages/87/02/ae7e0af4b674be47566707777db1aa375474f02a1d64b9323e5813a6cdd5/coverage-7.10.7-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:a8b6f03672aa6734e700bbcd65ff050fd19cddfec4b031cc8cf1c6967de5a68e", size = 249568, upload-time = "2025-09-21T20:01:59.748Z" }, + { url = "https://files.pythonhosted.org/packages/a2/77/8c6d22bf61921a59bce5471c2f1f7ac30cd4ac50aadde72b8c48d5727902/coverage-7.10.7-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:10b6ba00ab1132a0ce4428ff68cf50a25efd6840a42cdf4239c9b99aad83be8b", size = 252174, upload-time = "2025-09-21T20:02:01.192Z" }, + { url = "https://files.pythonhosted.org/packages/b1/20/b6ea4f69bbb52dac0aebd62157ba6a9dddbfe664f5af8122dac296c3ee15/coverage-7.10.7-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c79124f70465a150e89340de5963f936ee97097d2ef76c869708c4248c63ca49", size = 253447, upload-time = "2025-09-21T20:02:02.701Z" }, + { url = "https://files.pythonhosted.org/packages/f9/28/4831523ba483a7f90f7b259d2018fef02cb4d5b90bc7c1505d6e5a84883c/coverage-7.10.7-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:69212fbccdbd5b0e39eac4067e20a4a5256609e209547d86f740d68ad4f04911", size = 249779, upload-time = "2025-09-21T20:02:04.185Z" }, + { url = "https://files.pythonhosted.org/packages/a7/9f/4331142bc98c10ca6436d2d620c3e165f31e6c58d43479985afce6f3191c/coverage-7.10.7-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:7ea7c6c9d0d286d04ed3541747e6597cbe4971f22648b68248f7ddcd329207f0", size = 251604, upload-time = "2025-09-21T20:02:06.034Z" }, + { url = "https://files.pythonhosted.org/packages/ce/60/bda83b96602036b77ecf34e6393a3836365481b69f7ed7079ab85048202b/coverage-7.10.7-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:b9be91986841a75042b3e3243d0b3cb0b2434252b977baaf0cd56e960fe1e46f", size = 249497, upload-time = 
"2025-09-21T20:02:07.619Z" }, + { url = "https://files.pythonhosted.org/packages/5f/af/152633ff35b2af63977edd835d8e6430f0caef27d171edf2fc76c270ef31/coverage-7.10.7-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:b281d5eca50189325cfe1f365fafade89b14b4a78d9b40b05ddd1fc7d2a10a9c", size = 249350, upload-time = "2025-09-21T20:02:10.34Z" }, + { url = "https://files.pythonhosted.org/packages/9d/71/d92105d122bd21cebba877228990e1646d862e34a98bb3374d3fece5a794/coverage-7.10.7-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:99e4aa63097ab1118e75a848a28e40d68b08a5e19ce587891ab7fd04475e780f", size = 251111, upload-time = "2025-09-21T20:02:12.122Z" }, + { url = "https://files.pythonhosted.org/packages/a2/9e/9fdb08f4bf476c912f0c3ca292e019aab6712c93c9344a1653986c3fd305/coverage-7.10.7-cp313-cp313-win32.whl", hash = "sha256:dc7c389dce432500273eaf48f410b37886be9208b2dd5710aaf7c57fd442c698", size = 220746, upload-time = "2025-09-21T20:02:13.919Z" }, + { url = "https://files.pythonhosted.org/packages/b1/b1/a75fd25df44eab52d1931e89980d1ada46824c7a3210be0d3c88a44aaa99/coverage-7.10.7-cp313-cp313-win_amd64.whl", hash = "sha256:cac0fdca17b036af3881a9d2729a850b76553f3f716ccb0360ad4dbc06b3b843", size = 221541, upload-time = "2025-09-21T20:02:15.57Z" }, + { url = "https://files.pythonhosted.org/packages/14/3a/d720d7c989562a6e9a14b2c9f5f2876bdb38e9367126d118495b89c99c37/coverage-7.10.7-cp313-cp313-win_arm64.whl", hash = "sha256:4b6f236edf6e2f9ae8fcd1332da4e791c1b6ba0dc16a2dc94590ceccb482e546", size = 220170, upload-time = "2025-09-21T20:02:17.395Z" }, + { url = "https://files.pythonhosted.org/packages/bb/22/e04514bf2a735d8b0add31d2b4ab636fc02370730787c576bb995390d2d5/coverage-7.10.7-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:a0ec07fd264d0745ee396b666d47cef20875f4ff2375d7c4f58235886cc1ef0c", size = 219029, upload-time = "2025-09-21T20:02:18.936Z" }, + { url = 
"https://files.pythonhosted.org/packages/11/0b/91128e099035ece15da3445d9015e4b4153a6059403452d324cbb0a575fa/coverage-7.10.7-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:dd5e856ebb7bfb7672b0086846db5afb4567a7b9714b8a0ebafd211ec7ce6a15", size = 219259, upload-time = "2025-09-21T20:02:20.44Z" }, + { url = "https://files.pythonhosted.org/packages/8b/51/66420081e72801536a091a0c8f8c1f88a5c4bf7b9b1bdc6222c7afe6dc9b/coverage-7.10.7-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:f57b2a3c8353d3e04acf75b3fed57ba41f5c0646bbf1d10c7c282291c97936b4", size = 260592, upload-time = "2025-09-21T20:02:22.313Z" }, + { url = "https://files.pythonhosted.org/packages/5d/22/9b8d458c2881b22df3db5bb3e7369e63d527d986decb6c11a591ba2364f7/coverage-7.10.7-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:1ef2319dd15a0b009667301a3f84452a4dc6fddfd06b0c5c53ea472d3989fbf0", size = 262768, upload-time = "2025-09-21T20:02:24.287Z" }, + { url = "https://files.pythonhosted.org/packages/f7/08/16bee2c433e60913c610ea200b276e8eeef084b0d200bdcff69920bd5828/coverage-7.10.7-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:83082a57783239717ceb0ad584de3c69cf581b2a95ed6bf81ea66034f00401c0", size = 264995, upload-time = "2025-09-21T20:02:26.133Z" }, + { url = "https://files.pythonhosted.org/packages/20/9d/e53eb9771d154859b084b90201e5221bca7674ba449a17c101a5031d4054/coverage-7.10.7-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:50aa94fb1fb9a397eaa19c0d5ec15a5edd03a47bf1a3a6111a16b36e190cff65", size = 259546, upload-time = "2025-09-21T20:02:27.716Z" }, + { url = "https://files.pythonhosted.org/packages/ad/b0/69bc7050f8d4e56a89fb550a1577d5d0d1db2278106f6f626464067b3817/coverage-7.10.7-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:2120043f147bebb41c85b97ac45dd173595ff14f2a584f2963891cbcc3091541", size = 262544, upload-time = 
"2025-09-21T20:02:29.216Z" }, + { url = "https://files.pythonhosted.org/packages/ef/4b/2514b060dbd1bc0aaf23b852c14bb5818f244c664cb16517feff6bb3a5ab/coverage-7.10.7-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:2fafd773231dd0378fdba66d339f84904a8e57a262f583530f4f156ab83863e6", size = 260308, upload-time = "2025-09-21T20:02:31.226Z" }, + { url = "https://files.pythonhosted.org/packages/54/78/7ba2175007c246d75e496f64c06e94122bdb914790a1285d627a918bd271/coverage-7.10.7-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:0b944ee8459f515f28b851728ad224fa2d068f1513ef6b7ff1efafeb2185f999", size = 258920, upload-time = "2025-09-21T20:02:32.823Z" }, + { url = "https://files.pythonhosted.org/packages/c0/b3/fac9f7abbc841409b9a410309d73bfa6cfb2e51c3fada738cb607ce174f8/coverage-7.10.7-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:4b583b97ab2e3efe1b3e75248a9b333bd3f8b0b1b8e5b45578e05e5850dfb2c2", size = 261434, upload-time = "2025-09-21T20:02:34.86Z" }, + { url = "https://files.pythonhosted.org/packages/ee/51/a03bec00d37faaa891b3ff7387192cef20f01604e5283a5fabc95346befa/coverage-7.10.7-cp313-cp313t-win32.whl", hash = "sha256:2a78cd46550081a7909b3329e2266204d584866e8d97b898cd7fb5ac8d888b1a", size = 221403, upload-time = "2025-09-21T20:02:37.034Z" }, + { url = "https://files.pythonhosted.org/packages/53/22/3cf25d614e64bf6d8e59c7c669b20d6d940bb337bdee5900b9ca41c820bb/coverage-7.10.7-cp313-cp313t-win_amd64.whl", hash = "sha256:33a5e6396ab684cb43dc7befa386258acb2d7fae7f67330ebb85ba4ea27938eb", size = 222469, upload-time = "2025-09-21T20:02:39.011Z" }, + { url = "https://files.pythonhosted.org/packages/49/a1/00164f6d30d8a01c3c9c48418a7a5be394de5349b421b9ee019f380df2a0/coverage-7.10.7-cp313-cp313t-win_arm64.whl", hash = "sha256:86b0e7308289ddde73d863b7683f596d8d21c7d8664ce1dee061d0bcf3fbb4bb", size = 220731, upload-time = "2025-09-21T20:02:40.939Z" }, + { url = 
"https://files.pythonhosted.org/packages/23/9c/5844ab4ca6a4dd97a1850e030a15ec7d292b5c5cb93082979225126e35dd/coverage-7.10.7-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:b06f260b16ead11643a5a9f955bd4b5fd76c1a4c6796aeade8520095b75de520", size = 218302, upload-time = "2025-09-21T20:02:42.527Z" }, + { url = "https://files.pythonhosted.org/packages/f0/89/673f6514b0961d1f0e20ddc242e9342f6da21eaba3489901b565c0689f34/coverage-7.10.7-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:212f8f2e0612778f09c55dd4872cb1f64a1f2b074393d139278ce902064d5b32", size = 218578, upload-time = "2025-09-21T20:02:44.468Z" }, + { url = "https://files.pythonhosted.org/packages/05/e8/261cae479e85232828fb17ad536765c88dd818c8470aca690b0ac6feeaa3/coverage-7.10.7-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:3445258bcded7d4aa630ab8296dea4d3f15a255588dd535f980c193ab6b95f3f", size = 249629, upload-time = "2025-09-21T20:02:46.503Z" }, + { url = "https://files.pythonhosted.org/packages/82/62/14ed6546d0207e6eda876434e3e8475a3e9adbe32110ce896c9e0c06bb9a/coverage-7.10.7-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:bb45474711ba385c46a0bfe696c695a929ae69ac636cda8f532be9e8c93d720a", size = 252162, upload-time = "2025-09-21T20:02:48.689Z" }, + { url = "https://files.pythonhosted.org/packages/ff/49/07f00db9ac6478e4358165a08fb41b469a1b053212e8a00cb02f0d27a05f/coverage-7.10.7-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:813922f35bd800dca9994c5971883cbc0d291128a5de6b167c7aa697fcf59360", size = 253517, upload-time = "2025-09-21T20:02:50.31Z" }, + { url = "https://files.pythonhosted.org/packages/a2/59/c5201c62dbf165dfbc91460f6dbbaa85a8b82cfa6131ac45d6c1bfb52deb/coverage-7.10.7-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:93c1b03552081b2a4423091d6fb3787265b8f86af404cff98d1b5342713bdd69", size = 249632, upload-time = 
"2025-09-21T20:02:51.971Z" }, + { url = "https://files.pythonhosted.org/packages/07/ae/5920097195291a51fb00b3a70b9bbd2edbfe3c84876a1762bd1ef1565ebc/coverage-7.10.7-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:cc87dd1b6eaf0b848eebb1c86469b9f72a1891cb42ac7adcfbce75eadb13dd14", size = 251520, upload-time = "2025-09-21T20:02:53.858Z" }, + { url = "https://files.pythonhosted.org/packages/b9/3c/a815dde77a2981f5743a60b63df31cb322c944843e57dbd579326625a413/coverage-7.10.7-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:39508ffda4f343c35f3236fe8d1a6634a51f4581226a1262769d7f970e73bffe", size = 249455, upload-time = "2025-09-21T20:02:55.807Z" }, + { url = "https://files.pythonhosted.org/packages/aa/99/f5cdd8421ea656abefb6c0ce92556709db2265c41e8f9fc6c8ae0f7824c9/coverage-7.10.7-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:925a1edf3d810537c5a3abe78ec5530160c5f9a26b1f4270b40e62cc79304a1e", size = 249287, upload-time = "2025-09-21T20:02:57.784Z" }, + { url = "https://files.pythonhosted.org/packages/c3/7a/e9a2da6a1fc5d007dd51fca083a663ab930a8c4d149c087732a5dbaa0029/coverage-7.10.7-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:2c8b9a0636f94c43cd3576811e05b89aa9bc2d0a85137affc544ae5cb0e4bfbd", size = 250946, upload-time = "2025-09-21T20:02:59.431Z" }, + { url = "https://files.pythonhosted.org/packages/ef/5b/0b5799aa30380a949005a353715095d6d1da81927d6dbed5def2200a4e25/coverage-7.10.7-cp314-cp314-win32.whl", hash = "sha256:b7b8288eb7cdd268b0304632da8cb0bb93fadcfec2fe5712f7b9cc8f4d487be2", size = 221009, upload-time = "2025-09-21T20:03:01.324Z" }, + { url = "https://files.pythonhosted.org/packages/da/b0/e802fbb6eb746de006490abc9bb554b708918b6774b722bb3a0e6aa1b7de/coverage-7.10.7-cp314-cp314-win_amd64.whl", hash = "sha256:1ca6db7c8807fb9e755d0379ccc39017ce0a84dcd26d14b5a03b78563776f681", size = 221804, upload-time = "2025-09-21T20:03:03.4Z" }, + { url = 
"https://files.pythonhosted.org/packages/9e/e8/71d0c8e374e31f39e3389bb0bd19e527d46f00ea8571ec7ec8fd261d8b44/coverage-7.10.7-cp314-cp314-win_arm64.whl", hash = "sha256:097c1591f5af4496226d5783d036bf6fd6cd0cbc132e071b33861de756efb880", size = 220384, upload-time = "2025-09-21T20:03:05.111Z" }, + { url = "https://files.pythonhosted.org/packages/62/09/9a5608d319fa3eba7a2019addeacb8c746fb50872b57a724c9f79f146969/coverage-7.10.7-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:a62c6ef0d50e6de320c270ff91d9dd0a05e7250cac2a800b7784bae474506e63", size = 219047, upload-time = "2025-09-21T20:03:06.795Z" }, + { url = "https://files.pythonhosted.org/packages/f5/6f/f58d46f33db9f2e3647b2d0764704548c184e6f5e014bef528b7f979ef84/coverage-7.10.7-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:9fa6e4dd51fe15d8738708a973470f67a855ca50002294852e9571cdbd9433f2", size = 219266, upload-time = "2025-09-21T20:03:08.495Z" }, + { url = "https://files.pythonhosted.org/packages/74/5c/183ffc817ba68e0b443b8c934c8795553eb0c14573813415bd59941ee165/coverage-7.10.7-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:8fb190658865565c549b6b4706856d6a7b09302c797eb2cf8e7fe9dabb043f0d", size = 260767, upload-time = "2025-09-21T20:03:10.172Z" }, + { url = "https://files.pythonhosted.org/packages/0f/48/71a8abe9c1ad7e97548835e3cc1adbf361e743e9d60310c5f75c9e7bf847/coverage-7.10.7-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:affef7c76a9ef259187ef31599a9260330e0335a3011732c4b9effa01e1cd6e0", size = 262931, upload-time = "2025-09-21T20:03:11.861Z" }, + { url = "https://files.pythonhosted.org/packages/84/fd/193a8fb132acfc0a901f72020e54be5e48021e1575bb327d8ee1097a28fd/coverage-7.10.7-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6e16e07d85ca0cf8bafe5f5d23a0b850064e8e945d5677492b06bbe6f09cc699", size = 265186, upload-time = "2025-09-21T20:03:13.539Z" }, + { url = 
"https://files.pythonhosted.org/packages/b1/8f/74ecc30607dd95ad50e3034221113ccb1c6d4e8085cc761134782995daae/coverage-7.10.7-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:03ffc58aacdf65d2a82bbeb1ffe4d01ead4017a21bfd0454983b88ca73af94b9", size = 259470, upload-time = "2025-09-21T20:03:15.584Z" }, + { url = "https://files.pythonhosted.org/packages/0f/55/79ff53a769f20d71b07023ea115c9167c0bb56f281320520cf64c5298a96/coverage-7.10.7-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:1b4fd784344d4e52647fd7857b2af5b3fbe6c239b0b5fa63e94eb67320770e0f", size = 262626, upload-time = "2025-09-21T20:03:17.673Z" }, + { url = "https://files.pythonhosted.org/packages/88/e2/dac66c140009b61ac3fc13af673a574b00c16efdf04f9b5c740703e953c0/coverage-7.10.7-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:0ebbaddb2c19b71912c6f2518e791aa8b9f054985a0769bdb3a53ebbc765c6a1", size = 260386, upload-time = "2025-09-21T20:03:19.36Z" }, + { url = "https://files.pythonhosted.org/packages/a2/f1/f48f645e3f33bb9ca8a496bc4a9671b52f2f353146233ebd7c1df6160440/coverage-7.10.7-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:a2d9a3b260cc1d1dbdb1c582e63ddcf5363426a1a68faa0f5da28d8ee3c722a0", size = 258852, upload-time = "2025-09-21T20:03:21.007Z" }, + { url = "https://files.pythonhosted.org/packages/bb/3b/8442618972c51a7affeead957995cfa8323c0c9bcf8fa5a027421f720ff4/coverage-7.10.7-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:a3cc8638b2480865eaa3926d192e64ce6c51e3d29c849e09d5b4ad95efae5399", size = 261534, upload-time = "2025-09-21T20:03:23.12Z" }, + { url = "https://files.pythonhosted.org/packages/b2/dc/101f3fa3a45146db0cb03f5b4376e24c0aac818309da23e2de0c75295a91/coverage-7.10.7-cp314-cp314t-win32.whl", hash = "sha256:67f8c5cbcd3deb7a60b3345dffc89a961a484ed0af1f6f73de91705cc6e31235", size = 221784, upload-time = "2025-09-21T20:03:24.769Z" }, + { url = 
"https://files.pythonhosted.org/packages/4c/a1/74c51803fc70a8a40d7346660379e144be772bab4ac7bb6e6b905152345c/coverage-7.10.7-cp314-cp314t-win_amd64.whl", hash = "sha256:e1ed71194ef6dea7ed2d5cb5f7243d4bcd334bfb63e59878519be558078f848d", size = 222905, upload-time = "2025-09-21T20:03:26.93Z" }, + { url = "https://files.pythonhosted.org/packages/12/65/f116a6d2127df30bcafbceef0302d8a64ba87488bf6f73a6d8eebf060873/coverage-7.10.7-cp314-cp314t-win_arm64.whl", hash = "sha256:7fe650342addd8524ca63d77b2362b02345e5f1a093266787d210c70a50b471a", size = 220922, upload-time = "2025-09-21T20:03:28.672Z" }, + { url = "https://files.pythonhosted.org/packages/ec/16/114df1c291c22cac3b0c127a73e0af5c12ed7bbb6558d310429a0ae24023/coverage-7.10.7-py3-none-any.whl", hash = "sha256:f7941f6f2fe6dd6807a1208737b8a0cbcf1cc6d7b07d24998ad2d63590868260", size = 209952, upload-time = "2025-09-21T20:03:53.918Z" }, +] + +[package.optional-dependencies] +toml = [ + { name = "tomli", marker = "python_full_version <= '3.11'" }, +] + +[[package]] +name = "cyclonedx-python-lib" +version = "9.1.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "license-expression" }, + { name = "packageurl-python" }, + { name = "py-serializable" }, + { name = "sortedcontainers" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/66/fc/abaad5482f7b59c9a0a9d8f354ce4ce23346d582a0d85730b559562bbeb4/cyclonedx_python_lib-9.1.0.tar.gz", hash = "sha256:86935f2c88a7b47a529b93c724dbd3e903bc573f6f8bd977628a7ca1b5dadea1", size = 1048735, upload-time = "2025-02-27T17:23:40.367Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/53/f1/f3be2e9820a2c26fa77622223e91f9c504e1581830930d477e06146073f4/cyclonedx_python_lib-9.1.0-py3-none-any.whl", hash = "sha256:55693fca8edaecc3363b24af14e82cc6e659eb1e8353e58b587c42652ce0fb52", size = 374968, upload-time = "2025-02-27T17:23:37.766Z" }, +] + +[[package]] +name = "defusedxml" +version = "0.7.1" +source = { registry = 
"https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/0f/d5/c66da9b79e5bdb124974bfe172b4daf3c984ebd9c2a06e2b8a4dc7331c72/defusedxml-0.7.1.tar.gz", hash = "sha256:1bb3032db185915b62d7c6209c5a8792be6a32ab2fedacc84e01b52c51aa3e69", size = 75520, upload-time = "2021-03-08T10:59:26.269Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/07/6c/aa3f2f849e01cb6a001cd8554a88d4c77c5c1a31c95bdf1cf9301e6d9ef4/defusedxml-0.7.1-py2.py3-none-any.whl", hash = "sha256:a352e7e428770286cc899e2542b6cdaedb2b4953ff269a210103ec58f6198a61", size = 25604, upload-time = "2021-03-08T10:59:24.45Z" }, +] + +[[package]] +name = "dependency-groups" +version = "1.3.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "packaging" }, + { name = "tomli", marker = "python_full_version < '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/62/55/f054de99871e7beb81935dea8a10b90cd5ce42122b1c3081d5282fdb3621/dependency_groups-1.3.1.tar.gz", hash = "sha256:78078301090517fd938c19f64a53ce98c32834dfe0dee6b88004a569a6adfefd", size = 10093, upload-time = "2025-05-02T00:34:29.452Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/99/c7/d1ec24fb280caa5a79b6b950db565dab30210a66259d17d5bb2b3a9f878d/dependency_groups-1.3.1-py3-none-any.whl", hash = "sha256:51aeaa0dfad72430fcfb7bcdbefbd75f3792e5919563077f30bc0d73f4493030", size = 8664, upload-time = "2025-05-02T00:34:27.085Z" }, +] + +[[package]] +name = "distlib" +version = "0.4.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/96/8e/709914eb2b5749865801041647dc7f4e6d00b549cfe88b65ca192995f07c/distlib-0.4.0.tar.gz", hash = "sha256:feec40075be03a04501a973d81f633735b4b69f98b05450592310c0f401a4e0d", size = 614605, upload-time = "2025-07-17T16:52:00.465Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/33/6b/e0547afaf41bf2c42e52430072fa5658766e3d65bd4b03a563d1b6336f57/distlib-0.4.0-py2.py3-none-any.whl", hash = "sha256:9659f7d87e46584a30b5780e43ac7a2143098441670ff0a49d5f9034c54a6c16", size = 469047, upload-time = "2025-07-17T16:51:58.613Z" }, +] + +[[package]] +name = "exceptiongroup" +version = "1.3.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/0b/9f/a65090624ecf468cdca03533906e7c69ed7588582240cfe7cc9e770b50eb/exceptiongroup-1.3.0.tar.gz", hash = "sha256:b241f5885f560bc56a59ee63ca4c6a8bfa46ae4ad651af316d4e81817bb9fd88", size = 29749, upload-time = "2025-05-10T17:42:51.123Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/36/f4/c6e662dade71f56cd2f3735141b265c3c79293c109549c1e6933b0651ffc/exceptiongroup-1.3.0-py3-none-any.whl", hash = "sha256:4d111e6e0c13d0644cad6ddaa7ed0261a0b36971f6d23e7ec9b4b9097da78a10", size = 16674, upload-time = "2025-05-10T17:42:49.33Z" }, +] + +[[package]] +name = "execnet" +version = "2.1.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/bb/ff/b4c0dc78fbe20c3e59c0c7334de0c27eb4001a2b2017999af398bf730817/execnet-2.1.1.tar.gz", hash = "sha256:5189b52c6121c24feae288166ab41b32549c7e2348652736540b9e6e7d4e72e3", size = 166524, upload-time = "2024-04-08T09:04:19.245Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/43/09/2aea36ff60d16dd8879bdb2f5b3ee0ba8d08cbbdcdfe870e695ce3784385/execnet-2.1.1-py3-none-any.whl", hash = "sha256:26dee51f1b80cebd6d0ca8e74dd8745419761d3bef34163928cbebbdc4749fdc", size = 40612, upload-time = "2024-04-08T09:04:17.414Z" }, +] + +[[package]] +name = "filelock" +version = "3.19.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/40/bb/0ab3e58d22305b6f5440629d20683af28959bf793d98d11950e305c1c326/filelock-3.19.1.tar.gz", hash = "sha256:66eda1888b0171c998b35be2bcc0f6d75c388a7ce20c3f3f37aa8e96c2dddf58", size = 17687, upload-time = "2025-08-14T16:56:03.016Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/42/14/42b2651a2f46b022ccd948bca9f2d5af0fd8929c4eec235b8d6d844fbe67/filelock-3.19.1-py3-none-any.whl", hash = "sha256:d38e30481def20772f5baf097c122c3babc4fcdb7e14e57049eb9d88c6dc017d", size = 15988, upload-time = "2025-08-14T16:56:01.633Z" }, +] + +[[package]] +name = "h11" +version = "0.16.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/01/ee/02a2c011bdab74c6fb3c75474d40b3052059d95df7e73351460c8588d963/h11-0.16.0.tar.gz", hash = "sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1", size = 101250, upload-time = "2025-04-24T03:35:25.427Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/04/4b/29cac41a4d98d144bf5f6d33995617b185d14b22401f75ca86f384e87ff1/h11-0.16.0-py3-none-any.whl", hash = "sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86", size = 37515, upload-time = "2025-04-24T03:35:24.344Z" }, +] + +[[package]] +name = "httpcore" +version = "1.0.9" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "certifi" }, + { name = "h11" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/06/94/82699a10bca87a5556c9c59b5963f2d039dbd239f25bc2a63907a05a14cb/httpcore-1.0.9.tar.gz", hash = "sha256:6e34463af53fd2ab5d807f399a9b45ea31c3dfa2276f15a2c3f00afff6e176e8", size = 85484, upload-time = "2025-04-24T22:06:22.219Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7e/f5/f66802a942d491edb555dd61e3a9961140fd64c90bce1eafd741609d334d/httpcore-1.0.9-py3-none-any.whl", hash = "sha256:2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55", size = 78784, upload-time = 
"2025-04-24T22:06:20.566Z" }, +] + +[[package]] +name = "httpx" +version = "0.28.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, + { name = "certifi" }, + { name = "httpcore" }, + { name = "idna" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b1/df/48c586a5fe32a0f01324ee087459e112ebb7224f646c0b5023f5e79e9956/httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc", size = 141406, upload-time = "2024-12-06T15:37:23.222Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2a/39/e50c7c3a983047577ee07d2a9e53faf5a69493943ec3f6a384bdc792deb2/httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad", size = 73517, upload-time = "2024-12-06T15:37:21.509Z" }, +] + +[[package]] +name = "identify" +version = "2.6.14" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/52/c4/62963f25a678f6a050fb0505a65e9e726996171e6dbe1547f79619eefb15/identify-2.6.14.tar.gz", hash = "sha256:663494103b4f717cb26921c52f8751363dc89db64364cd836a9bf1535f53cd6a", size = 99283, upload-time = "2025-09-06T19:30:52.938Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e5/ae/2ad30f4652712c82f1c23423d79136fbce338932ad166d70c1efb86a5998/identify-2.6.14-py2.py3-none-any.whl", hash = "sha256:11a073da82212c6646b1f39bb20d4483bfb9543bd5566fec60053c4bb309bf2e", size = 99172, upload-time = "2025-09-06T19:30:51.759Z" }, +] + +[[package]] +name = "idna" +version = "3.10" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f1/70/7703c29685631f5a7590aa73f1f1d3fa9a380e654b86af429e0934a32f7d/idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9", size = 190490, upload-time = "2024-09-15T18:07:39.745Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3", size = 70442, upload-time = "2024-09-15T18:07:37.964Z" }, +] + +[[package]] +name = "iniconfig" +version = "2.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f2/97/ebf4da567aa6827c909642694d71c9fcf53e5b504f2d96afea02718862f3/iniconfig-2.1.0.tar.gz", hash = "sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7", size = 4793, upload-time = "2025-03-19T20:09:59.721Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2c/e1/e6716421ea10d38022b952c159d5161ca1193197fb744506875fbb87ea7b/iniconfig-2.1.0-py3-none-any.whl", hash = "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760", size = 6050, upload-time = "2025-03-19T20:10:01.071Z" }, +] + +[[package]] +name = "license-expression" +version = "30.4.4" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "boolean-py" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/40/71/d89bb0e71b1415453980fd32315f2a037aad9f7f70f695c7cec7035feb13/license_expression-30.4.4.tar.gz", hash = "sha256:73448f0aacd8d0808895bdc4b2c8e01a8d67646e4188f887375398c761f340fd", size = 186402, upload-time = "2025-07-22T11:13:32.17Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/af/40/791891d4c0c4dab4c5e187c17261cedc26285fd41541577f900470a45a4d/license_expression-30.4.4-py3-none-any.whl", hash = "sha256:421788fdcadb41f049d2dc934ce666626265aeccefddd25e162a26f23bcbf8a4", size = 120615, upload-time = "2025-07-22T11:13:31.217Z" }, +] + +[[package]] +name = "markdown-it-py" +version = "4.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "mdurl" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/5b/f5/4ec618ed16cc4f8fb3b701563655a69816155e79e24a17b651541804721d/markdown_it_py-4.0.0.tar.gz", hash = "sha256:cb0a2b4aa34f932c007117b194e945bd74e0ec24133ceb5bac59009cda1cb9f3", size = 73070, upload-time = "2025-08-11T12:57:52.854Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/94/54/e7d793b573f298e1c9013b8c4dade17d481164aa517d1d7148619c2cedbf/markdown_it_py-4.0.0-py3-none-any.whl", hash = "sha256:87327c59b172c5011896038353a81343b6754500a08cd7a4973bb48c6d578147", size = 87321, upload-time = "2025-08-11T12:57:51.923Z" }, +] + +[[package]] +name = "mdurl" +version = "0.1.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d6/54/cfe61301667036ec958cb99bd3efefba235e65cdeb9c84d24a8293ba1d90/mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba", size = 8729, upload-time = "2022-08-14T12:40:10.846Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b3/38/89ba8ad64ae25be8de66a6d463314cf1eb366222074cfda9ee839c56a4b4/mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8", size = 9979, upload-time = "2022-08-14T12:40:09.779Z" }, +] + +[[package]] +name = "msgpack" +version = "1.1.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/4d/f2/bfb55a6236ed8725a96b0aa3acbd0ec17588e6a2c3b62a93eb513ed8783f/msgpack-1.1.2.tar.gz", hash = "sha256:3b60763c1373dd60f398488069bcdc703cd08a711477b5d480eecc9f9626f47e", size = 173581, upload-time = "2025-10-08T09:15:56.596Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f5/a2/3b68a9e769db68668b25c6108444a35f9bd163bb848c0650d516761a59c0/msgpack-1.1.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0051fffef5a37ca2cd16978ae4f0aef92f164df86823871b5162812bebecd8e2", size = 81318, upload-time = "2025-10-08T09:14:38.722Z" }, + { url = 
"https://files.pythonhosted.org/packages/5b/e1/2b720cc341325c00be44e1ed59e7cfeae2678329fbf5aa68f5bda57fe728/msgpack-1.1.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a605409040f2da88676e9c9e5853b3449ba8011973616189ea5ee55ddbc5bc87", size = 83786, upload-time = "2025-10-08T09:14:40.082Z" }, + { url = "https://files.pythonhosted.org/packages/71/e5/c2241de64bfceac456b140737812a2ab310b10538a7b34a1d393b748e095/msgpack-1.1.2-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8b696e83c9f1532b4af884045ba7f3aa741a63b2bc22617293a2c6a7c645f251", size = 398240, upload-time = "2025-10-08T09:14:41.151Z" }, + { url = "https://files.pythonhosted.org/packages/b7/09/2a06956383c0fdebaef5aa9246e2356776f12ea6f2a44bd1368abf0e46c4/msgpack-1.1.2-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:365c0bbe981a27d8932da71af63ef86acc59ed5c01ad929e09a0b88c6294e28a", size = 406070, upload-time = "2025-10-08T09:14:42.821Z" }, + { url = "https://files.pythonhosted.org/packages/0e/74/2957703f0e1ef20637d6aead4fbb314330c26f39aa046b348c7edcf6ca6b/msgpack-1.1.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:41d1a5d875680166d3ac5c38573896453bbbea7092936d2e107214daf43b1d4f", size = 393403, upload-time = "2025-10-08T09:14:44.38Z" }, + { url = "https://files.pythonhosted.org/packages/a5/09/3bfc12aa90f77b37322fc33e7a8a7c29ba7c8edeadfa27664451801b9860/msgpack-1.1.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:354e81bcdebaab427c3df4281187edc765d5d76bfb3a7c125af9da7a27e8458f", size = 398947, upload-time = "2025-10-08T09:14:45.56Z" }, + { url = "https://files.pythonhosted.org/packages/4b/4f/05fcebd3b4977cb3d840f7ef6b77c51f8582086de5e642f3fefee35c86fc/msgpack-1.1.2-cp310-cp310-win32.whl", hash = "sha256:e64c8d2f5e5d5fda7b842f55dec6133260ea8f53c4257d64494c534f306bf7a9", size = 64769, upload-time = "2025-10-08T09:14:47.334Z" }, + { url = 
"https://files.pythonhosted.org/packages/d0/3e/b4547e3a34210956382eed1c85935fff7e0f9b98be3106b3745d7dec9c5e/msgpack-1.1.2-cp310-cp310-win_amd64.whl", hash = "sha256:db6192777d943bdaaafb6ba66d44bf65aa0e9c5616fa1d2da9bb08828c6b39aa", size = 71293, upload-time = "2025-10-08T09:14:48.665Z" }, + { url = "https://files.pythonhosted.org/packages/2c/97/560d11202bcd537abca693fd85d81cebe2107ba17301de42b01ac1677b69/msgpack-1.1.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2e86a607e558d22985d856948c12a3fa7b42efad264dca8a3ebbcfa2735d786c", size = 82271, upload-time = "2025-10-08T09:14:49.967Z" }, + { url = "https://files.pythonhosted.org/packages/83/04/28a41024ccbd67467380b6fb440ae916c1e4f25e2cd4c63abe6835ac566e/msgpack-1.1.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:283ae72fc89da59aa004ba147e8fc2f766647b1251500182fac0350d8af299c0", size = 84914, upload-time = "2025-10-08T09:14:50.958Z" }, + { url = "https://files.pythonhosted.org/packages/71/46/b817349db6886d79e57a966346cf0902a426375aadc1e8e7a86a75e22f19/msgpack-1.1.2-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:61c8aa3bd513d87c72ed0b37b53dd5c5a0f58f2ff9f26e1555d3bd7948fb7296", size = 416962, upload-time = "2025-10-08T09:14:51.997Z" }, + { url = "https://files.pythonhosted.org/packages/da/e0/6cc2e852837cd6086fe7d8406af4294e66827a60a4cf60b86575a4a65ca8/msgpack-1.1.2-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:454e29e186285d2ebe65be34629fa0e8605202c60fbc7c4c650ccd41870896ef", size = 426183, upload-time = "2025-10-08T09:14:53.477Z" }, + { url = "https://files.pythonhosted.org/packages/25/98/6a19f030b3d2ea906696cedd1eb251708e50a5891d0978b012cb6107234c/msgpack-1.1.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:7bc8813f88417599564fafa59fd6f95be417179f76b40325b500b3c98409757c", size = 411454, upload-time = "2025-10-08T09:14:54.648Z" }, + { url = 
"https://files.pythonhosted.org/packages/b7/cd/9098fcb6adb32187a70b7ecaabf6339da50553351558f37600e53a4a2a23/msgpack-1.1.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bafca952dc13907bdfdedfc6a5f579bf4f292bdd506fadb38389afa3ac5b208e", size = 422341, upload-time = "2025-10-08T09:14:56.328Z" }, + { url = "https://files.pythonhosted.org/packages/e6/ae/270cecbcf36c1dc85ec086b33a51a4d7d08fc4f404bdbc15b582255d05ff/msgpack-1.1.2-cp311-cp311-win32.whl", hash = "sha256:602b6740e95ffc55bfb078172d279de3773d7b7db1f703b2f1323566b878b90e", size = 64747, upload-time = "2025-10-08T09:14:57.882Z" }, + { url = "https://files.pythonhosted.org/packages/2a/79/309d0e637f6f37e83c711f547308b91af02b72d2326ddd860b966080ef29/msgpack-1.1.2-cp311-cp311-win_amd64.whl", hash = "sha256:d198d275222dc54244bf3327eb8cbe00307d220241d9cec4d306d49a44e85f68", size = 71633, upload-time = "2025-10-08T09:14:59.177Z" }, + { url = "https://files.pythonhosted.org/packages/73/4d/7c4e2b3d9b1106cd0aa6cb56cc57c6267f59fa8bfab7d91df5adc802c847/msgpack-1.1.2-cp311-cp311-win_arm64.whl", hash = "sha256:86f8136dfa5c116365a8a651a7d7484b65b13339731dd6faebb9a0242151c406", size = 64755, upload-time = "2025-10-08T09:15:00.48Z" }, + { url = "https://files.pythonhosted.org/packages/ad/bd/8b0d01c756203fbab65d265859749860682ccd2a59594609aeec3a144efa/msgpack-1.1.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:70a0dff9d1f8da25179ffcf880e10cf1aad55fdb63cd59c9a49a1b82290062aa", size = 81939, upload-time = "2025-10-08T09:15:01.472Z" }, + { url = "https://files.pythonhosted.org/packages/34/68/ba4f155f793a74c1483d4bdef136e1023f7bcba557f0db4ef3db3c665cf1/msgpack-1.1.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:446abdd8b94b55c800ac34b102dffd2f6aa0ce643c55dfc017ad89347db3dbdb", size = 85064, upload-time = "2025-10-08T09:15:03.764Z" }, + { url = 
"https://files.pythonhosted.org/packages/f2/60/a064b0345fc36c4c3d2c743c82d9100c40388d77f0b48b2f04d6041dbec1/msgpack-1.1.2-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c63eea553c69ab05b6747901b97d620bb2a690633c77f23feb0c6a947a8a7b8f", size = 417131, upload-time = "2025-10-08T09:15:05.136Z" }, + { url = "https://files.pythonhosted.org/packages/65/92/a5100f7185a800a5d29f8d14041f61475b9de465ffcc0f3b9fba606e4505/msgpack-1.1.2-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:372839311ccf6bdaf39b00b61288e0557916c3729529b301c52c2d88842add42", size = 427556, upload-time = "2025-10-08T09:15:06.837Z" }, + { url = "https://files.pythonhosted.org/packages/f5/87/ffe21d1bf7d9991354ad93949286f643b2bb6ddbeab66373922b44c3b8cc/msgpack-1.1.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2929af52106ca73fcb28576218476ffbb531a036c2adbcf54a3664de124303e9", size = 404920, upload-time = "2025-10-08T09:15:08.179Z" }, + { url = "https://files.pythonhosted.org/packages/ff/41/8543ed2b8604f7c0d89ce066f42007faac1eaa7d79a81555f206a5cdb889/msgpack-1.1.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:be52a8fc79e45b0364210eef5234a7cf8d330836d0a64dfbb878efa903d84620", size = 415013, upload-time = "2025-10-08T09:15:09.83Z" }, + { url = "https://files.pythonhosted.org/packages/41/0d/2ddfaa8b7e1cee6c490d46cb0a39742b19e2481600a7a0e96537e9c22f43/msgpack-1.1.2-cp312-cp312-win32.whl", hash = "sha256:1fff3d825d7859ac888b0fbda39a42d59193543920eda9d9bea44d958a878029", size = 65096, upload-time = "2025-10-08T09:15:11.11Z" }, + { url = "https://files.pythonhosted.org/packages/8c/ec/d431eb7941fb55a31dd6ca3404d41fbb52d99172df2e7707754488390910/msgpack-1.1.2-cp312-cp312-win_amd64.whl", hash = "sha256:1de460f0403172cff81169a30b9a92b260cb809c4cb7e2fc79ae8d0510c78b6b", size = 72708, upload-time = "2025-10-08T09:15:12.554Z" }, + { url = 
"https://files.pythonhosted.org/packages/c5/31/5b1a1f70eb0e87d1678e9624908f86317787b536060641d6798e3cf70ace/msgpack-1.1.2-cp312-cp312-win_arm64.whl", hash = "sha256:be5980f3ee0e6bd44f3a9e9dea01054f175b50c3e6cdb692bc9424c0bbb8bf69", size = 64119, upload-time = "2025-10-08T09:15:13.589Z" }, + { url = "https://files.pythonhosted.org/packages/6b/31/b46518ecc604d7edf3a4f94cb3bf021fc62aa301f0cb849936968164ef23/msgpack-1.1.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:4efd7b5979ccb539c221a4c4e16aac1a533efc97f3b759bb5a5ac9f6d10383bf", size = 81212, upload-time = "2025-10-08T09:15:14.552Z" }, + { url = "https://files.pythonhosted.org/packages/92/dc/c385f38f2c2433333345a82926c6bfa5ecfff3ef787201614317b58dd8be/msgpack-1.1.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:42eefe2c3e2af97ed470eec850facbe1b5ad1d6eacdbadc42ec98e7dcf68b4b7", size = 84315, upload-time = "2025-10-08T09:15:15.543Z" }, + { url = "https://files.pythonhosted.org/packages/d3/68/93180dce57f684a61a88a45ed13047558ded2be46f03acb8dec6d7c513af/msgpack-1.1.2-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1fdf7d83102bf09e7ce3357de96c59b627395352a4024f6e2458501f158bf999", size = 412721, upload-time = "2025-10-08T09:15:16.567Z" }, + { url = "https://files.pythonhosted.org/packages/5d/ba/459f18c16f2b3fc1a1ca871f72f07d70c07bf768ad0a507a698b8052ac58/msgpack-1.1.2-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fac4be746328f90caa3cd4bc67e6fe36ca2bf61d5c6eb6d895b6527e3f05071e", size = 424657, upload-time = "2025-10-08T09:15:17.825Z" }, + { url = "https://files.pythonhosted.org/packages/38/f8/4398c46863b093252fe67368b44edc6c13b17f4e6b0e4929dbf0bdb13f23/msgpack-1.1.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:fffee09044073e69f2bad787071aeec727183e7580443dfeb8556cbf1978d162", size = 402668, upload-time = "2025-10-08T09:15:19.003Z" }, + { url = 
"https://files.pythonhosted.org/packages/28/ce/698c1eff75626e4124b4d78e21cca0b4cc90043afb80a507626ea354ab52/msgpack-1.1.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5928604de9b032bc17f5099496417f113c45bc6bc21b5c6920caf34b3c428794", size = 419040, upload-time = "2025-10-08T09:15:20.183Z" }, + { url = "https://files.pythonhosted.org/packages/67/32/f3cd1667028424fa7001d82e10ee35386eea1408b93d399b09fb0aa7875f/msgpack-1.1.2-cp313-cp313-win32.whl", hash = "sha256:a7787d353595c7c7e145e2331abf8b7ff1e6673a6b974ded96e6d4ec09f00c8c", size = 65037, upload-time = "2025-10-08T09:15:21.416Z" }, + { url = "https://files.pythonhosted.org/packages/74/07/1ed8277f8653c40ebc65985180b007879f6a836c525b3885dcc6448ae6cb/msgpack-1.1.2-cp313-cp313-win_amd64.whl", hash = "sha256:a465f0dceb8e13a487e54c07d04ae3ba131c7c5b95e2612596eafde1dccf64a9", size = 72631, upload-time = "2025-10-08T09:15:22.431Z" }, + { url = "https://files.pythonhosted.org/packages/e5/db/0314e4e2db56ebcf450f277904ffd84a7988b9e5da8d0d61ab2d057df2b6/msgpack-1.1.2-cp313-cp313-win_arm64.whl", hash = "sha256:e69b39f8c0aa5ec24b57737ebee40be647035158f14ed4b40e6f150077e21a84", size = 64118, upload-time = "2025-10-08T09:15:23.402Z" }, + { url = "https://files.pythonhosted.org/packages/22/71/201105712d0a2ff07b7873ed3c220292fb2ea5120603c00c4b634bcdafb3/msgpack-1.1.2-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:e23ce8d5f7aa6ea6d2a2b326b4ba46c985dbb204523759984430db7114f8aa00", size = 81127, upload-time = "2025-10-08T09:15:24.408Z" }, + { url = "https://files.pythonhosted.org/packages/1b/9f/38ff9e57a2eade7bf9dfee5eae17f39fc0e998658050279cbb14d97d36d9/msgpack-1.1.2-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:6c15b7d74c939ebe620dd8e559384be806204d73b4f9356320632d783d1f7939", size = 84981, upload-time = "2025-10-08T09:15:25.812Z" }, + { url = 
"https://files.pythonhosted.org/packages/8e/a9/3536e385167b88c2cc8f4424c49e28d49a6fc35206d4a8060f136e71f94c/msgpack-1.1.2-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:99e2cb7b9031568a2a5c73aa077180f93dd2e95b4f8d3b8e14a73ae94a9e667e", size = 411885, upload-time = "2025-10-08T09:15:27.22Z" }, + { url = "https://files.pythonhosted.org/packages/2f/40/dc34d1a8d5f1e51fc64640b62b191684da52ca469da9cd74e84936ffa4a6/msgpack-1.1.2-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:180759d89a057eab503cf62eeec0aa61c4ea1200dee709f3a8e9397dbb3b6931", size = 419658, upload-time = "2025-10-08T09:15:28.4Z" }, + { url = "https://files.pythonhosted.org/packages/3b/ef/2b92e286366500a09a67e03496ee8b8ba00562797a52f3c117aa2b29514b/msgpack-1.1.2-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:04fb995247a6e83830b62f0b07bf36540c213f6eac8e851166d8d86d83cbd014", size = 403290, upload-time = "2025-10-08T09:15:29.764Z" }, + { url = "https://files.pythonhosted.org/packages/78/90/e0ea7990abea5764e4655b8177aa7c63cdfa89945b6e7641055800f6c16b/msgpack-1.1.2-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:8e22ab046fa7ede9e36eeb4cfad44d46450f37bb05d5ec482b02868f451c95e2", size = 415234, upload-time = "2025-10-08T09:15:31.022Z" }, + { url = "https://files.pythonhosted.org/packages/72/4e/9390aed5db983a2310818cd7d3ec0aecad45e1f7007e0cda79c79507bb0d/msgpack-1.1.2-cp314-cp314-win32.whl", hash = "sha256:80a0ff7d4abf5fecb995fcf235d4064b9a9a8a40a3ab80999e6ac1e30b702717", size = 66391, upload-time = "2025-10-08T09:15:32.265Z" }, + { url = "https://files.pythonhosted.org/packages/6e/f1/abd09c2ae91228c5f3998dbd7f41353def9eac64253de3c8105efa2082f7/msgpack-1.1.2-cp314-cp314-win_amd64.whl", hash = "sha256:9ade919fac6a3e7260b7f64cea89df6bec59104987cbea34d34a2fa15d74310b", size = 73787, upload-time = "2025-10-08T09:15:33.219Z" }, + { url = 
"https://files.pythonhosted.org/packages/6a/b0/9d9f667ab48b16ad4115c1935d94023b82b3198064cb84a123e97f7466c1/msgpack-1.1.2-cp314-cp314-win_arm64.whl", hash = "sha256:59415c6076b1e30e563eb732e23b994a61c159cec44deaf584e5cc1dd662f2af", size = 66453, upload-time = "2025-10-08T09:15:34.225Z" }, + { url = "https://files.pythonhosted.org/packages/16/67/93f80545eb1792b61a217fa7f06d5e5cb9e0055bed867f43e2b8e012e137/msgpack-1.1.2-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:897c478140877e5307760b0ea66e0932738879e7aa68144d9b78ea4c8302a84a", size = 85264, upload-time = "2025-10-08T09:15:35.61Z" }, + { url = "https://files.pythonhosted.org/packages/87/1c/33c8a24959cf193966ef11a6f6a2995a65eb066bd681fd085afd519a57ce/msgpack-1.1.2-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:a668204fa43e6d02f89dbe79a30b0d67238d9ec4c5bd8a940fc3a004a47b721b", size = 89076, upload-time = "2025-10-08T09:15:36.619Z" }, + { url = "https://files.pythonhosted.org/packages/fc/6b/62e85ff7193663fbea5c0254ef32f0c77134b4059f8da89b958beb7696f3/msgpack-1.1.2-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5559d03930d3aa0f3aacb4c42c776af1a2ace2611871c84a75afe436695e6245", size = 435242, upload-time = "2025-10-08T09:15:37.647Z" }, + { url = "https://files.pythonhosted.org/packages/c1/47/5c74ecb4cc277cf09f64e913947871682ffa82b3b93c8dad68083112f412/msgpack-1.1.2-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:70c5a7a9fea7f036b716191c29047374c10721c389c21e9ffafad04df8c52c90", size = 432509, upload-time = "2025-10-08T09:15:38.794Z" }, + { url = "https://files.pythonhosted.org/packages/24/a4/e98ccdb56dc4e98c929a3f150de1799831c0a800583cde9fa022fa90602d/msgpack-1.1.2-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:f2cb069d8b981abc72b41aea1c580ce92d57c673ec61af4c500153a626cb9e20", size = 415957, upload-time = "2025-10-08T09:15:40.238Z" }, + { url = 
"https://files.pythonhosted.org/packages/da/28/6951f7fb67bc0a4e184a6b38ab71a92d9ba58080b27a77d3e2fb0be5998f/msgpack-1.1.2-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:d62ce1f483f355f61adb5433ebfd8868c5f078d1a52d042b0a998682b4fa8c27", size = 422910, upload-time = "2025-10-08T09:15:41.505Z" }, + { url = "https://files.pythonhosted.org/packages/f0/03/42106dcded51f0a0b5284d3ce30a671e7bd3f7318d122b2ead66ad289fed/msgpack-1.1.2-cp314-cp314t-win32.whl", hash = "sha256:1d1418482b1ee984625d88aa9585db570180c286d942da463533b238b98b812b", size = 75197, upload-time = "2025-10-08T09:15:42.954Z" }, + { url = "https://files.pythonhosted.org/packages/15/86/d0071e94987f8db59d4eeb386ddc64d0bb9b10820a8d82bcd3e53eeb2da6/msgpack-1.1.2-cp314-cp314t-win_amd64.whl", hash = "sha256:5a46bf7e831d09470ad92dff02b8b1ac92175ca36b087f904a0519857c6be3ff", size = 85772, upload-time = "2025-10-08T09:15:43.954Z" }, + { url = "https://files.pythonhosted.org/packages/81/f2/08ace4142eb281c12701fc3b93a10795e4d4dc7f753911d836675050f886/msgpack-1.1.2-cp314-cp314t-win_arm64.whl", hash = "sha256:d99ef64f349d5ec3293688e91486c5fdb925ed03807f64d98d205d2713c60b46", size = 70868, upload-time = "2025-10-08T09:15:44.959Z" }, +] + +[[package]] +name = "mypy" +version = "1.18.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "mypy-extensions" }, + { name = "pathspec" }, + { name = "tomli", marker = "python_full_version < '3.11'" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/c0/77/8f0d0001ffad290cef2f7f216f96c814866248a0b92a722365ed54648e7e/mypy-1.18.2.tar.gz", hash = "sha256:06a398102a5f203d7477b2923dda3634c36727fa5c237d8f859ef90c42a9924b", size = 3448846, upload-time = "2025-09-19T00:11:10.519Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/03/6f/657961a0743cff32e6c0611b63ff1c1970a0b482ace35b069203bf705187/mypy-1.18.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = 
"sha256:c1eab0cf6294dafe397c261a75f96dc2c31bffe3b944faa24db5def4e2b0f77c", size = 12807973, upload-time = "2025-09-19T00:10:35.282Z" }, + { url = "https://files.pythonhosted.org/packages/10/e9/420822d4f661f13ca8900f5fa239b40ee3be8b62b32f3357df9a3045a08b/mypy-1.18.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:7a780ca61fc239e4865968ebc5240bb3bf610ef59ac398de9a7421b54e4a207e", size = 11896527, upload-time = "2025-09-19T00:10:55.791Z" }, + { url = "https://files.pythonhosted.org/packages/aa/73/a05b2bbaa7005f4642fcfe40fb73f2b4fb6bb44229bd585b5878e9a87ef8/mypy-1.18.2-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:448acd386266989ef11662ce3c8011fd2a7b632e0ec7d61a98edd8e27472225b", size = 12507004, upload-time = "2025-09-19T00:11:05.411Z" }, + { url = "https://files.pythonhosted.org/packages/4f/01/f6e4b9f0d031c11ccbd6f17da26564f3a0f3c4155af344006434b0a05a9d/mypy-1.18.2-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f9e171c465ad3901dc652643ee4bffa8e9fef4d7d0eece23b428908c77a76a66", size = 13245947, upload-time = "2025-09-19T00:10:46.923Z" }, + { url = "https://files.pythonhosted.org/packages/d7/97/19727e7499bfa1ae0773d06afd30ac66a58ed7437d940c70548634b24185/mypy-1.18.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:592ec214750bc00741af1f80cbf96b5013d81486b7bb24cb052382c19e40b428", size = 13499217, upload-time = "2025-09-19T00:09:39.472Z" }, + { url = "https://files.pythonhosted.org/packages/9f/4f/90dc8c15c1441bf31cf0f9918bb077e452618708199e530f4cbd5cede6ff/mypy-1.18.2-cp310-cp310-win_amd64.whl", hash = "sha256:7fb95f97199ea11769ebe3638c29b550b5221e997c63b14ef93d2e971606ebed", size = 9766753, upload-time = "2025-09-19T00:10:49.161Z" }, + { url = "https://files.pythonhosted.org/packages/88/87/cafd3ae563f88f94eec33f35ff722d043e09832ea8530ef149ec1efbaf08/mypy-1.18.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:807d9315ab9d464125aa9fcf6d84fde6e1dc67da0b6f80e7405506b8ac72bc7f", size = 12731198, upload-time = "2025-09-19T00:09:44.857Z" }, + { url = "https://files.pythonhosted.org/packages/0f/e0/1e96c3d4266a06d4b0197ace5356d67d937d8358e2ee3ffac71faa843724/mypy-1.18.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:776bb00de1778caf4db739c6e83919c1d85a448f71979b6a0edd774ea8399341", size = 11817879, upload-time = "2025-09-19T00:09:47.131Z" }, + { url = "https://files.pythonhosted.org/packages/72/ef/0c9ba89eb03453e76bdac5a78b08260a848c7bfc5d6603634774d9cd9525/mypy-1.18.2-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1379451880512ffce14505493bd9fe469e0697543717298242574882cf8cdb8d", size = 12427292, upload-time = "2025-09-19T00:10:22.472Z" }, + { url = "https://files.pythonhosted.org/packages/1a/52/ec4a061dd599eb8179d5411d99775bec2a20542505988f40fc2fee781068/mypy-1.18.2-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1331eb7fd110d60c24999893320967594ff84c38ac6d19e0a76c5fd809a84c86", size = 13163750, upload-time = "2025-09-19T00:09:51.472Z" }, + { url = "https://files.pythonhosted.org/packages/c4/5f/2cf2ceb3b36372d51568f2208c021870fe7834cf3186b653ac6446511839/mypy-1.18.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:3ca30b50a51e7ba93b00422e486cbb124f1c56a535e20eff7b2d6ab72b3b2e37", size = 13351827, upload-time = "2025-09-19T00:09:58.311Z" }, + { url = "https://files.pythonhosted.org/packages/c8/7d/2697b930179e7277529eaaec1513f8de622818696857f689e4a5432e5e27/mypy-1.18.2-cp311-cp311-win_amd64.whl", hash = "sha256:664dc726e67fa54e14536f6e1224bcfce1d9e5ac02426d2326e2bb4e081d1ce8", size = 9757983, upload-time = "2025-09-19T00:10:09.071Z" }, + { url = "https://files.pythonhosted.org/packages/07/06/dfdd2bc60c66611dd8335f463818514733bc763e4760dee289dcc33df709/mypy-1.18.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = 
"sha256:33eca32dd124b29400c31d7cf784e795b050ace0e1f91b8dc035672725617e34", size = 12908273, upload-time = "2025-09-19T00:10:58.321Z" }, + { url = "https://files.pythonhosted.org/packages/81/14/6a9de6d13a122d5608e1a04130724caf9170333ac5a924e10f670687d3eb/mypy-1.18.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a3c47adf30d65e89b2dcd2fa32f3aeb5e94ca970d2c15fcb25e297871c8e4764", size = 11920910, upload-time = "2025-09-19T00:10:20.043Z" }, + { url = "https://files.pythonhosted.org/packages/5f/a9/b29de53e42f18e8cc547e38daa9dfa132ffdc64f7250e353f5c8cdd44bee/mypy-1.18.2-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5d6c838e831a062f5f29d11c9057c6009f60cb294fea33a98422688181fe2893", size = 12465585, upload-time = "2025-09-19T00:10:33.005Z" }, + { url = "https://files.pythonhosted.org/packages/77/ae/6c3d2c7c61ff21f2bee938c917616c92ebf852f015fb55917fd6e2811db2/mypy-1.18.2-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:01199871b6110a2ce984bde85acd481232d17413868c9807e95c1b0739a58914", size = 13348562, upload-time = "2025-09-19T00:10:11.51Z" }, + { url = "https://files.pythonhosted.org/packages/4d/31/aec68ab3b4aebdf8f36d191b0685d99faa899ab990753ca0fee60fb99511/mypy-1.18.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a2afc0fa0b0e91b4599ddfe0f91e2c26c2b5a5ab263737e998d6817874c5f7c8", size = 13533296, upload-time = "2025-09-19T00:10:06.568Z" }, + { url = "https://files.pythonhosted.org/packages/9f/83/abcb3ad9478fca3ebeb6a5358bb0b22c95ea42b43b7789c7fb1297ca44f4/mypy-1.18.2-cp312-cp312-win_amd64.whl", hash = "sha256:d8068d0afe682c7c4897c0f7ce84ea77f6de953262b12d07038f4d296d547074", size = 9828828, upload-time = "2025-09-19T00:10:28.203Z" }, + { url = "https://files.pythonhosted.org/packages/5f/04/7f462e6fbba87a72bc8097b93f6842499c428a6ff0c81dd46948d175afe8/mypy-1.18.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = 
"sha256:07b8b0f580ca6d289e69209ec9d3911b4a26e5abfde32228a288eb79df129fcc", size = 12898728, upload-time = "2025-09-19T00:10:01.33Z" }, + { url = "https://files.pythonhosted.org/packages/99/5b/61ed4efb64f1871b41fd0b82d29a64640f3516078f6c7905b68ab1ad8b13/mypy-1.18.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:ed4482847168439651d3feee5833ccedbf6657e964572706a2adb1f7fa4dfe2e", size = 11910758, upload-time = "2025-09-19T00:10:42.607Z" }, + { url = "https://files.pythonhosted.org/packages/3c/46/d297d4b683cc89a6e4108c4250a6a6b717f5fa96e1a30a7944a6da44da35/mypy-1.18.2-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c3ad2afadd1e9fea5cf99a45a822346971ede8685cc581ed9cd4d42eaf940986", size = 12475342, upload-time = "2025-09-19T00:11:00.371Z" }, + { url = "https://files.pythonhosted.org/packages/83/45/4798f4d00df13eae3bfdf726c9244bcb495ab5bd588c0eed93a2f2dd67f3/mypy-1.18.2-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a431a6f1ef14cf8c144c6b14793a23ec4eae3db28277c358136e79d7d062f62d", size = 13338709, upload-time = "2025-09-19T00:11:03.358Z" }, + { url = "https://files.pythonhosted.org/packages/d7/09/479f7358d9625172521a87a9271ddd2441e1dab16a09708f056e97007207/mypy-1.18.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:7ab28cc197f1dd77a67e1c6f35cd1f8e8b73ed2217e4fc005f9e6a504e46e7ba", size = 13529806, upload-time = "2025-09-19T00:10:26.073Z" }, + { url = "https://files.pythonhosted.org/packages/71/cf/ac0f2c7e9d0ea3c75cd99dff7aec1c9df4a1376537cb90e4c882267ee7e9/mypy-1.18.2-cp313-cp313-win_amd64.whl", hash = "sha256:0e2785a84b34a72ba55fb5daf079a1003a34c05b22238da94fcae2bbe46f3544", size = 9833262, upload-time = "2025-09-19T00:10:40.035Z" }, + { url = "https://files.pythonhosted.org/packages/5a/0c/7d5300883da16f0063ae53996358758b2a2df2a09c72a5061fa79a1f5006/mypy-1.18.2-cp314-cp314-macosx_10_13_x86_64.whl", hash = 
"sha256:62f0e1e988ad41c2a110edde6c398383a889d95b36b3e60bcf155f5164c4fdce", size = 12893775, upload-time = "2025-09-19T00:10:03.814Z" }, + { url = "https://files.pythonhosted.org/packages/50/df/2cffbf25737bdb236f60c973edf62e3e7b4ee1c25b6878629e88e2cde967/mypy-1.18.2-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:8795a039bab805ff0c1dfdb8cd3344642c2b99b8e439d057aba30850b8d3423d", size = 11936852, upload-time = "2025-09-19T00:10:51.631Z" }, + { url = "https://files.pythonhosted.org/packages/be/50/34059de13dd269227fb4a03be1faee6e2a4b04a2051c82ac0a0b5a773c9a/mypy-1.18.2-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6ca1e64b24a700ab5ce10133f7ccd956a04715463d30498e64ea8715236f9c9c", size = 12480242, upload-time = "2025-09-19T00:11:07.955Z" }, + { url = "https://files.pythonhosted.org/packages/5b/11/040983fad5132d85914c874a2836252bbc57832065548885b5bb5b0d4359/mypy-1.18.2-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d924eef3795cc89fecf6bedc6ed32b33ac13e8321344f6ddbf8ee89f706c05cb", size = 13326683, upload-time = "2025-09-19T00:09:55.572Z" }, + { url = "https://files.pythonhosted.org/packages/e9/ba/89b2901dd77414dd7a8c8729985832a5735053be15b744c18e4586e506ef/mypy-1.18.2-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:20c02215a080e3a2be3aa50506c67242df1c151eaba0dcbc1e4e557922a26075", size = 13514749, upload-time = "2025-09-19T00:10:44.827Z" }, + { url = "https://files.pythonhosted.org/packages/25/bc/cc98767cffd6b2928ba680f3e5bc969c4152bf7c2d83f92f5a504b92b0eb/mypy-1.18.2-cp314-cp314-win_amd64.whl", hash = "sha256:749b5f83198f1ca64345603118a6f01a4e99ad4bf9d103ddc5a3200cc4614adf", size = 9982959, upload-time = "2025-09-19T00:10:37.344Z" }, + { url = "https://files.pythonhosted.org/packages/87/e3/be76d87158ebafa0309946c4a73831974d4d6ab4f4ef40c3b53a385a66fd/mypy-1.18.2-py3-none-any.whl", hash = 
"sha256:22a1748707dd62b58d2ae53562ffc4d7f8bcc727e8ac7cbc69c053ddc874d47e", size = 2352367, upload-time = "2025-09-19T00:10:15.489Z" }, +] + +[[package]] +name = "mypy-extensions" +version = "1.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a2/6e/371856a3fb9d31ca8dac321cda606860fa4548858c0cc45d9d1d4ca2628b/mypy_extensions-1.1.0.tar.gz", hash = "sha256:52e68efc3284861e772bbcd66823fde5ae21fd2fdb51c62a211403730b916558", size = 6343, upload-time = "2025-04-22T14:54:24.164Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/79/7b/2c79738432f5c924bef5071f933bcc9efd0473bac3b4aa584a6f7c1c8df8/mypy_extensions-1.1.0-py3-none-any.whl", hash = "sha256:1be4cccdb0f2482337c4743e60421de3a356cd97508abadd57d47403e94f5505", size = 4963, upload-time = "2025-04-22T14:54:22.983Z" }, +] + +[[package]] +name = "nodeenv" +version = "1.9.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/43/16/fc88b08840de0e0a72a2f9d8c6bae36be573e475a6326ae854bcc549fc45/nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f", size = 47437, upload-time = "2024-06-04T18:44:11.171Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d2/1d/1b658dbd2b9fa9c4c9f32accbfc0205d532c8c6194dc0f2a4c0428e7128a/nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9", size = 22314, upload-time = "2024-06-04T18:44:08.352Z" }, +] + +[[package]] +name = "nox" +version = "2025.5.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "argcomplete" }, + { name = "attrs" }, + { name = "colorlog" }, + { name = "dependency-groups" }, + { name = "packaging" }, + { name = "tomli", marker = "python_full_version < '3.11'" }, + { name = "virtualenv" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/b4/80/47712208c410defec169992e57c179f0f4d92f5dd17ba8daca50a8077e23/nox-2025.5.1.tar.gz", hash = "sha256:2a571dfa7a58acc726521ac3cd8184455ebcdcbf26401c7b737b5bc6701427b2", size = 4023334, upload-time = "2025-05-01T16:35:48.056Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a6/be/7b423b02b09eb856beffe76fe8c4121c99852db74dd12a422dcb72d1134e/nox-2025.5.1-py3-none-any.whl", hash = "sha256:56abd55cf37ff523c254fcec4d152ed51e5fe80e2ab8317221d8b828ac970a31", size = 71753, upload-time = "2025-05-01T16:35:46.037Z" }, +] + +[[package]] +name = "packageurl-python" +version = "0.17.5" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/3a/f0/de0ac00a4484c0d87b71e3d9985518278d89797fa725e90abd3453bccb42/packageurl_python-0.17.5.tar.gz", hash = "sha256:a7be3f3ba70d705f738ace9bf6124f31920245a49fa69d4b416da7037dd2de61", size = 43832, upload-time = "2025-08-06T14:08:20.235Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/be/78/9dbb7d2ef240d20caf6f79c0f66866737c9d0959601fd783ff635d1d019d/packageurl_python-0.17.5-py3-none-any.whl", hash = "sha256:f0e55452ab37b5c192c443de1458e3f3b4d8ac27f747df6e8c48adeab081d321", size = 30544, upload-time = "2025-08-06T14:08:19.055Z" }, +] + +[[package]] +name = "packaging" +version = "25.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a1/d4/1fc4078c65507b51b96ca8f8c3ba19e6a61c8253c72794544580a7b6c24d/packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f", size = 165727, upload-time = "2025-04-19T11:48:59.673Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/20/12/38679034af332785aac8774540895e234f4d07f7545804097de4b666afd8/packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484", size = 66469, upload-time = "2025-04-19T11:48:57.875Z" 
}, +] + +[[package]] +name = "pathspec" +version = "0.12.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ca/bc/f35b8446f4531a7cb215605d100cd88b7ac6f44ab3fc94870c120ab3adbf/pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712", size = 51043, upload-time = "2023-12-10T22:30:45Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/cc/20/ff623b09d963f88bfde16306a54e12ee5ea43e9b597108672ff3a408aad6/pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08", size = 31191, upload-time = "2023-12-10T22:30:43.14Z" }, +] + +[[package]] +name = "pdfrest" +version = "0.1.0" +source = { editable = "." } +dependencies = [ + { name = "httpx" }, + { name = "pydantic" }, +] + +[package.dev-dependencies] +dev = [ + { name = "mypy" }, + { name = "nox" }, + { name = "pip-audit" }, + { name = "pre-commit" }, + { name = "pyright" }, + { name = "pytest" }, + { name = "pytest-asyncio" }, + { name = "pytest-cov" }, + { name = "pytest-dotenv" }, + { name = "pytest-emoji" }, + { name = "pytest-md" }, + { name = "pytest-rerunfailures" }, + { name = "pytest-xdist" }, + { name = "ruff" }, +] + +[package.metadata] +requires-dist = [ + { name = "httpx", specifier = ">=0.28.1" }, + { name = "pydantic", specifier = ">=2.12.0" }, +] + +[package.metadata.requires-dev] +dev = [ + { name = "mypy", specifier = ">=1.18.2" }, + { name = "nox", specifier = ">=2025.5.1" }, + { name = "pip-audit", specifier = ">=2.7.3" }, + { name = "pre-commit", specifier = ">=3.7.0" }, + { name = "pyright", specifier = ">=1.1.406" }, + { name = "pytest", specifier = ">=8.3.3" }, + { name = "pytest-asyncio", specifier = ">=1.2.0" }, + { name = "pytest-cov", specifier = ">=5.0.0" }, + { name = "pytest-dotenv", specifier = ">=0.5.2" }, + { name = "pytest-emoji", specifier = ">=0.2.0" }, + { name = "pytest-md", specifier = ">=0.2.0" }, + { name 
= "pytest-rerunfailures", specifier = ">=16.0.1" }, + { name = "pytest-xdist", specifier = ">=3.8.0" }, + { name = "ruff", specifier = ">=0.6.9" }, +] + +[[package]] +name = "pip" +version = "25.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/20/16/650289cd3f43d5a2fadfd98c68bd1e1e7f2550a1a5326768cddfbcedb2c5/pip-25.2.tar.gz", hash = "sha256:578283f006390f85bb6282dffb876454593d637f5d1be494b5202ce4877e71f2", size = 1840021, upload-time = "2025-07-30T21:50:15.401Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b7/3f/945ef7ab14dc4f9d7f40288d2df998d1837ee0888ec3659c813487572faa/pip-25.2-py3-none-any.whl", hash = "sha256:6d67a2b4e7f14d8b31b8b52648866fa717f45a1eb70e83002f4331d07e953717", size = 1752557, upload-time = "2025-07-30T21:50:13.323Z" }, +] + +[[package]] +name = "pip-api" +version = "0.0.34" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pip" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b9/f1/ee85f8c7e82bccf90a3c7aad22863cc6e20057860a1361083cd2adacb92e/pip_api-0.0.34.tar.gz", hash = "sha256:9b75e958f14c5a2614bae415f2adf7eeb54d50a2cfbe7e24fd4826471bac3625", size = 123017, upload-time = "2024-07-09T20:32:30.641Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/91/f7/ebf5003e1065fd00b4cbef53bf0a65c3d3e1b599b676d5383ccb7a8b88ba/pip_api-0.0.34-py3-none-any.whl", hash = "sha256:8b2d7d7c37f2447373aa2cf8b1f60a2f2b27a84e1e9e0294a3f6ef10eb3ba6bb", size = 120369, upload-time = "2024-07-09T20:32:29.099Z" }, +] + +[[package]] +name = "pip-audit" +version = "2.9.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cachecontrol", extra = ["filecache"] }, + { name = "cyclonedx-python-lib" }, + { name = "packaging" }, + { name = "pip-api" }, + { name = "pip-requirements-parser" }, + { name = "platformdirs" }, + { name = "requests" }, + { name = "rich" }, + { name = "toml" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/cc/7f/28fad19a9806f796f13192ab6974c07c4a04d9cbb8e30dd895c3c11ce7ee/pip_audit-2.9.0.tar.gz", hash = "sha256:0b998410b58339d7a231e5aa004326a294e4c7c6295289cdc9d5e1ef07b1f44d", size = 52089, upload-time = "2025-04-07T16:45:23.679Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/43/9e/f4dfd9d3dadb6d6dc9406f1111062f871e2e248ed7b584cca6020baf2ac1/pip_audit-2.9.0-py3-none-any.whl", hash = "sha256:348b16e60895749a0839875d7cc27ebd692e1584ebe5d5cb145941c8e25a80bd", size = 58634, upload-time = "2025-04-07T16:45:22.056Z" }, +] + +[[package]] +name = "pip-requirements-parser" +version = "32.0.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "packaging" }, + { name = "pyparsing" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/5e/2a/63b574101850e7f7b306ddbdb02cb294380d37948140eecd468fae392b54/pip-requirements-parser-32.0.1.tar.gz", hash = "sha256:b4fa3a7a0be38243123cf9d1f3518da10c51bdb165a2b2985566247f9155a7d3", size = 209359, upload-time = "2022-12-21T15:25:22.732Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/54/d0/d04f1d1e064ac901439699ee097f58688caadea42498ec9c4b4ad2ef84ab/pip_requirements_parser-32.0.1-py3-none-any.whl", hash = "sha256:4659bc2a667783e7a15d190f6fccf8b2486685b6dba4c19c3876314769c57526", size = 35648, upload-time = "2022-12-21T15:25:21.046Z" }, +] + +[[package]] +name = "platformdirs" +version = "4.4.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/23/e8/21db9c9987b0e728855bd57bff6984f67952bea55d6f75e055c46b5383e8/platformdirs-4.4.0.tar.gz", hash = "sha256:ca753cf4d81dc309bc67b0ea38fd15dc97bc30ce419a7f58d13eb3bf14c4febf", size = 21634, upload-time = "2025-08-26T14:32:04.268Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/40/4b/2028861e724d3bd36227adfa20d3fd24c3fc6d52032f4a93c133be5d17ce/platformdirs-4.4.0-py3-none-any.whl", hash = 
"sha256:abd01743f24e5287cd7a5db3752faf1a2d65353f38ec26d98e25a6db65958c85", size = 18654, upload-time = "2025-08-26T14:32:02.735Z" }, +] + +[[package]] +name = "pluggy" +version = "1.6.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f9/e2/3e91f31a7d2b083fe6ef3fa267035b518369d9511ffab804f839851d2779/pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3", size = 69412, upload-time = "2025-05-15T12:30:07.975Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538, upload-time = "2025-05-15T12:30:06.134Z" }, +] + +[[package]] +name = "pre-commit" +version = "4.3.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cfgv" }, + { name = "identify" }, + { name = "nodeenv" }, + { name = "pyyaml" }, + { name = "virtualenv" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ff/29/7cf5bbc236333876e4b41f56e06857a87937ce4bf91e117a6991a2dbb02a/pre_commit-4.3.0.tar.gz", hash = "sha256:499fe450cc9d42e9d58e606262795ecb64dd05438943c62b66f6a8673da30b16", size = 193792, upload-time = "2025-08-09T18:56:14.651Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5b/a5/987a405322d78a73b66e39e4a90e4ef156fd7141bf71df987e50717c321b/pre_commit-4.3.0-py2.py3-none-any.whl", hash = "sha256:2b0747ad7e6e967169136edffee14c16e148a778a54e4f967921aa1ebf2308d8", size = 220965, upload-time = "2025-08-09T18:56:13.192Z" }, +] + +[[package]] +name = "py-serializable" +version = "2.1.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "defusedxml" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/73/21/d250cfca8ff30c2e5a7447bc13861541126ce9bd4426cd5d0c9f08b5547d/py_serializable-2.1.0.tar.gz", 
hash = "sha256:9d5db56154a867a9b897c0163b33a793c804c80cee984116d02d49e4578fc103", size = 52368, upload-time = "2025-07-21T09:56:48.07Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9b/bf/7595e817906a29453ba4d99394e781b6fabe55d21f3c15d240f85dd06bb1/py_serializable-2.1.0-py3-none-any.whl", hash = "sha256:b56d5d686b5a03ba4f4db5e769dc32336e142fc3bd4d68a8c25579ebb0a67304", size = 23045, upload-time = "2025-07-21T09:56:46.848Z" }, +] + +[[package]] +name = "pydantic" +version = "2.12.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "annotated-types" }, + { name = "pydantic-core" }, + { name = "typing-extensions" }, + { name = "typing-inspection" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/c3/da/b8a7ee04378a53f6fefefc0c5e05570a3ebfdfa0523a878bcd3b475683ee/pydantic-2.12.0.tar.gz", hash = "sha256:c1a077e6270dbfb37bfd8b498b3981e2bb18f68103720e51fa6c306a5a9af563", size = 814760, upload-time = "2025-10-07T15:58:03.467Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f4/9d/d5c855424e2e5b6b626fbc6ec514d8e655a600377ce283008b115abb7445/pydantic-2.12.0-py3-none-any.whl", hash = "sha256:f6a1da352d42790537e95e83a8bdfb91c7efbae63ffd0b86fa823899e807116f", size = 459730, upload-time = "2025-10-07T15:58:01.576Z" }, +] + +[[package]] +name = "pydantic-core" +version = "2.41.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/7d/14/12b4a0d2b0b10d8e1d9a24ad94e7bbb43335eaf29c0c4e57860e8a30734a/pydantic_core-2.41.1.tar.gz", hash = "sha256:1ad375859a6d8c356b7704ec0f547a58e82ee80bb41baa811ad710e124bc8f2f", size = 454870, upload-time = "2025-10-07T10:50:45.974Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b3/2c/a5c4640dc7132540109f67fe83b566fbc7512ccf2a068cfa22a243df70c7/pydantic_core-2.41.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = 
"sha256:e63036298322e9aea1c8b7c0a6c1204d615dbf6ec0668ce5b83ff27f07404a61", size = 2113814, upload-time = "2025-10-06T21:09:50.892Z" }, + { url = "https://files.pythonhosted.org/packages/e3/e7/a8694c3454a57842095d69c7a4ab3cf81c3c7b590f052738eabfdfc2e234/pydantic_core-2.41.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:241299ca91fc77ef64f11ed909d2d9220a01834e8e6f8de61275c4dd16b7c936", size = 1916660, upload-time = "2025-10-06T21:09:52.783Z" }, + { url = "https://files.pythonhosted.org/packages/9c/58/29f12e65b19c1877a0269eb4f23c5d2267eded6120a7d6762501ab843dc9/pydantic_core-2.41.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1ab7e594a2a5c24ab8013a7dc8cfe5f2260e80e490685814122081705c2cf2b0", size = 1975071, upload-time = "2025-10-06T21:09:54.009Z" }, + { url = "https://files.pythonhosted.org/packages/98/26/4e677f2b7ec3fbdd10be6b586a82a814c8ebe3e474024c8df2d4260e564e/pydantic_core-2.41.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b054ef1a78519cb934b58e9c90c09e93b837c935dcd907b891f2b265b129eb6e", size = 2067271, upload-time = "2025-10-06T21:09:55.175Z" }, + { url = "https://files.pythonhosted.org/packages/29/50/50614bd906089904d7ca1be3b9ecf08c00a327143d48f1decfdc21b3c302/pydantic_core-2.41.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f2ab7d10d0ab2ed6da54c757233eb0f48ebfb4f86e9b88ccecb3f92bbd61a538", size = 2253207, upload-time = "2025-10-06T21:09:56.709Z" }, + { url = "https://files.pythonhosted.org/packages/ea/58/b1e640b4ca559273cca7c28e0fe8891d5d8e9a600f5ab4882670ec107549/pydantic_core-2.41.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2757606b7948bb853a27e4040820306eaa0ccb9e8f9f8a0fa40cb674e170f350", size = 2375052, upload-time = "2025-10-06T21:09:57.97Z" }, + { url = 
"https://files.pythonhosted.org/packages/53/25/cd47df3bfb24350e03835f0950288d1054f1cc9a8023401dabe6d4ff2834/pydantic_core-2.41.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cec0e75eb61f606bad0a32f2be87507087514e26e8c73db6cbdb8371ccd27917", size = 2076834, upload-time = "2025-10-06T21:09:59.58Z" }, + { url = "https://files.pythonhosted.org/packages/ec/b4/71b2c77e5df527fbbc1a03e72c3fd96c44cd10d4241a81befef8c12b9fc4/pydantic_core-2.41.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0234236514f44a5bf552105cfe2543a12f48203397d9d0f866affa569345a5b5", size = 2195374, upload-time = "2025-10-06T21:10:01.18Z" }, + { url = "https://files.pythonhosted.org/packages/aa/08/4b8a50733005865efde284fec45da75fe16a258f706e16323c5ace4004eb/pydantic_core-2.41.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:1b974e41adfbb4ebb0f65fc4ca951347b17463d60893ba7d5f7b9bb087c83897", size = 2156060, upload-time = "2025-10-06T21:10:02.74Z" }, + { url = "https://files.pythonhosted.org/packages/83/c3/1037cb603ef2130c210150a51b1710d86825b5c28df54a55750099f91196/pydantic_core-2.41.1-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:248dafb3204136113c383e91a4d815269f51562b6659b756cf3df14eefc7d0bb", size = 2331640, upload-time = "2025-10-06T21:10:04.39Z" }, + { url = "https://files.pythonhosted.org/packages/56/4c/52d111869610e6b1a46e1f1035abcdc94d0655587e39104433a290e9f377/pydantic_core-2.41.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:678f9d76a91d6bcedd7568bbf6beb77ae8447f85d1aeebaab7e2f0829cfc3a13", size = 2329844, upload-time = "2025-10-06T21:10:05.68Z" }, + { url = "https://files.pythonhosted.org/packages/32/5d/4b435f0b52ab543967761aca66b84ad3f0026e491e57de47693d15d0a8db/pydantic_core-2.41.1-cp310-cp310-win32.whl", hash = "sha256:dff5bee1d21ee58277900692a641925d2dddfde65182c972569b1a276d2ac8fb", size = 1991289, upload-time = "2025-10-06T21:10:07.199Z" }, + { url = 
"https://files.pythonhosted.org/packages/88/52/31b4deafc1d3cb96d0e7c0af70f0dc05454982d135d07f5117e6336153e8/pydantic_core-2.41.1-cp310-cp310-win_amd64.whl", hash = "sha256:5042da12e5d97d215f91567110fdfa2e2595a25f17c19b9ff024f31c34f9b53e", size = 2027747, upload-time = "2025-10-06T21:10:08.503Z" }, + { url = "https://files.pythonhosted.org/packages/f6/a9/ec440f02e57beabdfd804725ef1e38ac1ba00c49854d298447562e119513/pydantic_core-2.41.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:4f276a6134fe1fc1daa692642a3eaa2b7b858599c49a7610816388f5e37566a1", size = 2111456, upload-time = "2025-10-06T21:10:09.824Z" }, + { url = "https://files.pythonhosted.org/packages/f0/f9/6bc15bacfd8dcfc073a1820a564516d9c12a435a9a332d4cbbfd48828ddd/pydantic_core-2.41.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:07588570a805296ece009c59d9a679dc08fab72fb337365afb4f3a14cfbfc176", size = 1915012, upload-time = "2025-10-06T21:10:11.599Z" }, + { url = "https://files.pythonhosted.org/packages/38/8a/d9edcdcdfe80bade17bed424284427c08bea892aaec11438fa52eaeaf79c/pydantic_core-2.41.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:28527e4b53400cd60ffbd9812ccb2b5135d042129716d71afd7e45bf42b855c0", size = 1973762, upload-time = "2025-10-06T21:10:13.154Z" }, + { url = "https://files.pythonhosted.org/packages/d5/b3/ff225c6d49fba4279de04677c1c876fc3dc6562fd0c53e9bfd66f58c51a8/pydantic_core-2.41.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:46a1c935c9228bad738c8a41de06478770927baedf581d172494ab36a6b96575", size = 2065386, upload-time = "2025-10-06T21:10:14.436Z" }, + { url = "https://files.pythonhosted.org/packages/47/ba/183e8c0be4321314af3fd1ae6bfc7eafdd7a49bdea5da81c56044a207316/pydantic_core-2.41.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:447ddf56e2b7d28d200d3e9eafa936fe40485744b5a824b67039937580b3cb20", size = 2252317, upload-time = "2025-10-06T21:10:15.719Z" }, + { url = 
"https://files.pythonhosted.org/packages/57/c5/aab61e94fd02f45c65f1f8c9ec38bb3b33fbf001a1837c74870e97462572/pydantic_core-2.41.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:63892ead40c1160ac860b5debcc95c95c5a0035e543a8b5a4eac70dd22e995f4", size = 2373405, upload-time = "2025-10-06T21:10:17.017Z" }, + { url = "https://files.pythonhosted.org/packages/e5/4f/3aaa3bd1ea420a15acc42d7d3ccb3b0bbc5444ae2f9dbc1959f8173e16b8/pydantic_core-2.41.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f4a9543ca355e6df8fbe9c83e9faab707701e9103ae857ecb40f1c0cf8b0e94d", size = 2073794, upload-time = "2025-10-06T21:10:18.383Z" }, + { url = "https://files.pythonhosted.org/packages/58/bd/e3975cdebe03ec080ef881648de316c73f2a6be95c14fc4efb2f7bdd0d41/pydantic_core-2.41.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f2611bdb694116c31e551ed82e20e39a90bea9b7ad9e54aaf2d045ad621aa7a1", size = 2194430, upload-time = "2025-10-06T21:10:19.638Z" }, + { url = "https://files.pythonhosted.org/packages/2b/b8/6b7e7217f147d3b3105b57fb1caec3c4f667581affdfaab6d1d277e1f749/pydantic_core-2.41.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:fecc130893a9b5f7bfe230be1bb8c61fe66a19db8ab704f808cb25a82aad0bc9", size = 2154611, upload-time = "2025-10-06T21:10:21.28Z" }, + { url = "https://files.pythonhosted.org/packages/fe/7b/239c2fe76bd8b7eef9ae2140d737368a3c6fea4fd27f8f6b4cde6baa3ce9/pydantic_core-2.41.1-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:1e2df5f8344c99b6ea5219f00fdc8950b8e6f2c422fbc1cc122ec8641fac85a1", size = 2329809, upload-time = "2025-10-06T21:10:22.678Z" }, + { url = "https://files.pythonhosted.org/packages/bd/2e/77a821a67ff0786f2f14856d6bd1348992f695ee90136a145d7a445c1ff6/pydantic_core-2.41.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:35291331e9d8ed94c257bab6be1cb3a380b5eee570a2784bffc055e18040a2ea", size = 2327907, upload-time = "2025-10-06T21:10:24.447Z" }, + { url = 
"https://files.pythonhosted.org/packages/fd/9a/b54512bb9df7f64c586b369328c30481229b70ca6a5fcbb90b715e15facf/pydantic_core-2.41.1-cp311-cp311-win32.whl", hash = "sha256:2876a095292668d753f1a868c4a57c4ac9f6acbd8edda8debe4218d5848cf42f", size = 1989964, upload-time = "2025-10-06T21:10:25.676Z" }, + { url = "https://files.pythonhosted.org/packages/9d/72/63c9a4f1a5c950e65dd522d7dd67f167681f9d4f6ece3b80085a0329f08f/pydantic_core-2.41.1-cp311-cp311-win_amd64.whl", hash = "sha256:b92d6c628e9a338846a28dfe3fcdc1a3279388624597898b105e078cdfc59298", size = 2025158, upload-time = "2025-10-06T21:10:27.522Z" }, + { url = "https://files.pythonhosted.org/packages/d8/16/4e2706184209f61b50c231529257c12eb6bd9eb36e99ea1272e4815d2200/pydantic_core-2.41.1-cp311-cp311-win_arm64.whl", hash = "sha256:7d82ae99409eb69d507a89835488fb657faa03ff9968a9379567b0d2e2e56bc5", size = 1972297, upload-time = "2025-10-06T21:10:28.814Z" }, + { url = "https://files.pythonhosted.org/packages/ee/bc/5f520319ee1c9e25010412fac4154a72e0a40d0a19eb00281b1f200c0947/pydantic_core-2.41.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:db2f82c0ccbce8f021ad304ce35cbe02aa2f95f215cac388eed542b03b4d5eb4", size = 2099300, upload-time = "2025-10-06T21:10:30.463Z" }, + { url = "https://files.pythonhosted.org/packages/31/14/010cd64c5c3814fb6064786837ec12604be0dd46df3327cf8474e38abbbd/pydantic_core-2.41.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:47694a31c710ced9205d5f1e7e8af3ca57cbb8a503d98cb9e33e27c97a501601", size = 1910179, upload-time = "2025-10-06T21:10:31.782Z" }, + { url = "https://files.pythonhosted.org/packages/8e/2e/23fc2a8a93efad52df302fdade0a60f471ecc0c7aac889801ac24b4c07d6/pydantic_core-2.41.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:93e9decce94daf47baf9e9d392f5f2557e783085f7c5e522011545d9d6858e00", size = 1957225, upload-time = "2025-10-06T21:10:33.11Z" }, + { url = 
"https://files.pythonhosted.org/packages/b9/b6/6db08b2725b2432b9390844852e11d320281e5cea8a859c52c68001975fa/pydantic_core-2.41.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ab0adafdf2b89c8b84f847780a119437a0931eca469f7b44d356f2b426dd9741", size = 2053315, upload-time = "2025-10-06T21:10:34.87Z" }, + { url = "https://files.pythonhosted.org/packages/61/d9/4de44600f2d4514b44f3f3aeeda2e14931214b6b5bf52479339e801ce748/pydantic_core-2.41.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5da98cc81873f39fd56882e1569c4677940fbc12bce6213fad1ead784192d7c8", size = 2224298, upload-time = "2025-10-06T21:10:36.233Z" }, + { url = "https://files.pythonhosted.org/packages/7a/ae/dbe51187a7f35fc21b283c5250571a94e36373eb557c1cba9f29a9806dcf/pydantic_core-2.41.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:209910e88afb01fd0fd403947b809ba8dba0e08a095e1f703294fda0a8fdca51", size = 2351797, upload-time = "2025-10-06T21:10:37.601Z" }, + { url = "https://files.pythonhosted.org/packages/b5/a7/975585147457c2e9fb951c7c8dab56deeb6aa313f3aa72c2fc0df3f74a49/pydantic_core-2.41.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:365109d1165d78d98e33c5bfd815a9b5d7d070f578caefaabcc5771825b4ecb5", size = 2074921, upload-time = "2025-10-06T21:10:38.927Z" }, + { url = "https://files.pythonhosted.org/packages/62/37/ea94d1d0c01dec1b7d236c7cec9103baab0021f42500975de3d42522104b/pydantic_core-2.41.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:706abf21e60a2857acdb09502bc853ee5bce732955e7b723b10311114f033115", size = 2187767, upload-time = "2025-10-06T21:10:40.651Z" }, + { url = "https://files.pythonhosted.org/packages/d3/fe/694cf9fdd3a777a618c3afd210dba7b414cb8a72b1bd29b199c2e5765fee/pydantic_core-2.41.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:bf0bd5417acf7f6a7ec3b53f2109f587be176cb35f9cf016da87e6017437a72d", size = 2136062, upload-time = 
"2025-10-06T21:10:42.09Z" }, + { url = "https://files.pythonhosted.org/packages/0f/ae/174aeabd89916fbd2988cc37b81a59e1186e952afd2a7ed92018c22f31ca/pydantic_core-2.41.1-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:2e71b1c6ceb9c78424ae9f63a07292fb769fb890a4e7efca5554c47f33a60ea5", size = 2317819, upload-time = "2025-10-06T21:10:43.974Z" }, + { url = "https://files.pythonhosted.org/packages/65/e8/e9aecafaebf53fc456314f72886068725d6fba66f11b013532dc21259343/pydantic_core-2.41.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:80745b9770b4a38c25015b517451c817799bfb9d6499b0d13d8227ec941cb513", size = 2312267, upload-time = "2025-10-06T21:10:45.34Z" }, + { url = "https://files.pythonhosted.org/packages/35/2f/1c2e71d2a052f9bb2f2df5a6a05464a0eb800f9e8d9dd800202fe31219e1/pydantic_core-2.41.1-cp312-cp312-win32.whl", hash = "sha256:83b64d70520e7890453f1aa21d66fda44e7b35f1cfea95adf7b4289a51e2b479", size = 1990927, upload-time = "2025-10-06T21:10:46.738Z" }, + { url = "https://files.pythonhosted.org/packages/b1/78/562998301ff2588b9c6dcc5cb21f52fa919d6e1decc75a35055feb973594/pydantic_core-2.41.1-cp312-cp312-win_amd64.whl", hash = "sha256:377defd66ee2003748ee93c52bcef2d14fde48fe28a0b156f88c3dbf9bc49a50", size = 2034703, upload-time = "2025-10-06T21:10:48.524Z" }, + { url = "https://files.pythonhosted.org/packages/b2/53/d95699ce5a5cdb44bb470bd818b848b9beadf51459fd4ea06667e8ede862/pydantic_core-2.41.1-cp312-cp312-win_arm64.whl", hash = "sha256:c95caff279d49c1d6cdfe2996e6c2ad712571d3b9caaa209a404426c326c4bde", size = 1972719, upload-time = "2025-10-06T21:10:50.256Z" }, + { url = "https://files.pythonhosted.org/packages/27/8a/6d54198536a90a37807d31a156642aae7a8e1263ed9fe6fc6245defe9332/pydantic_core-2.41.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:70e790fce5f05204ef4403159857bfcd587779da78627b0babb3654f75361ebf", size = 2105825, upload-time = "2025-10-06T21:10:51.719Z" }, + { url = 
"https://files.pythonhosted.org/packages/4f/2e/4784fd7b22ac9c8439db25bf98ffed6853d01e7e560a346e8af821776ccc/pydantic_core-2.41.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:9cebf1ca35f10930612d60bd0f78adfacee824c30a880e3534ba02c207cceceb", size = 1910126, upload-time = "2025-10-06T21:10:53.145Z" }, + { url = "https://files.pythonhosted.org/packages/f3/92/31eb0748059ba5bd0aa708fb4bab9fcb211461ddcf9e90702a6542f22d0d/pydantic_core-2.41.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:170406a37a5bc82c22c3274616bf6f17cc7df9c4a0a0a50449e559cb755db669", size = 1961472, upload-time = "2025-10-06T21:10:55.754Z" }, + { url = "https://files.pythonhosted.org/packages/ab/91/946527792275b5c4c7dde4cfa3e81241bf6900e9fee74fb1ba43e0c0f1ab/pydantic_core-2.41.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:12d4257fc9187a0ccd41b8b327d6a4e57281ab75e11dda66a9148ef2e1fb712f", size = 2063230, upload-time = "2025-10-06T21:10:57.179Z" }, + { url = "https://files.pythonhosted.org/packages/31/5d/a35c5d7b414e5c0749f1d9f0d159ee2ef4bab313f499692896b918014ee3/pydantic_core-2.41.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a75a33b4db105dd1c8d57839e17ee12db8d5ad18209e792fa325dbb4baeb00f4", size = 2229469, upload-time = "2025-10-06T21:10:59.409Z" }, + { url = "https://files.pythonhosted.org/packages/21/4d/8713737c689afa57ecfefe38db78259d4484c97aa494979e6a9d19662584/pydantic_core-2.41.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:08a589f850803a74e0fcb16a72081cafb0d72a3cdda500106942b07e76b7bf62", size = 2347986, upload-time = "2025-10-06T21:11:00.847Z" }, + { url = "https://files.pythonhosted.org/packages/f6/ec/929f9a3a5ed5cda767081494bacd32f783e707a690ce6eeb5e0730ec4986/pydantic_core-2.41.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a97939d6ea44763c456bd8a617ceada2c9b96bb5b8ab3dfa0d0827df7619014", size = 2072216, upload-time = 
"2025-10-06T21:11:02.43Z" }, + { url = "https://files.pythonhosted.org/packages/26/55/a33f459d4f9cc8786d9db42795dbecc84fa724b290d7d71ddc3d7155d46a/pydantic_core-2.41.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d2ae423c65c556f09569524b80ffd11babff61f33055ef9773d7c9fabc11ed8d", size = 2193047, upload-time = "2025-10-06T21:11:03.787Z" }, + { url = "https://files.pythonhosted.org/packages/77/af/d5c6959f8b089f2185760a2779079e3c2c411bfc70ea6111f58367851629/pydantic_core-2.41.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:4dc703015fbf8764d6a8001c327a87f1823b7328d40b47ce6000c65918ad2b4f", size = 2140613, upload-time = "2025-10-06T21:11:05.607Z" }, + { url = "https://files.pythonhosted.org/packages/58/e5/2c19bd2a14bffe7fabcf00efbfbd3ac430aaec5271b504a938ff019ac7be/pydantic_core-2.41.1-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:968e4ffdfd35698a5fe659e5e44c508b53664870a8e61c8f9d24d3d145d30257", size = 2327641, upload-time = "2025-10-06T21:11:07.143Z" }, + { url = "https://files.pythonhosted.org/packages/93/ef/e0870ccda798c54e6b100aff3c4d49df5458fd64217e860cb9c3b0a403f4/pydantic_core-2.41.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:fff2b76c8e172d34771cd4d4f0ade08072385310f214f823b5a6ad4006890d32", size = 2318229, upload-time = "2025-10-06T21:11:08.73Z" }, + { url = "https://files.pythonhosted.org/packages/b1/4b/c3b991d95f5deb24d0bd52e47bcf716098fa1afe0ce2d4bd3125b38566ba/pydantic_core-2.41.1-cp313-cp313-win32.whl", hash = "sha256:a38a5263185407ceb599f2f035faf4589d57e73c7146d64f10577f6449e8171d", size = 1997911, upload-time = "2025-10-06T21:11:10.329Z" }, + { url = "https://files.pythonhosted.org/packages/a7/ce/5c316fd62e01f8d6be1b7ee6b54273214e871772997dc2c95e204997a055/pydantic_core-2.41.1-cp313-cp313-win_amd64.whl", hash = "sha256:b42ae7fd6760782c975897e1fdc810f483b021b32245b0105d40f6e7a3803e4b", size = 2034301, upload-time = "2025-10-06T21:11:12.113Z" }, + { url = 
"https://files.pythonhosted.org/packages/29/41/902640cfd6a6523194123e2c3373c60f19006447f2fb06f76de4e8466c5b/pydantic_core-2.41.1-cp313-cp313-win_arm64.whl", hash = "sha256:ad4111acc63b7384e205c27a2f15e23ac0ee21a9d77ad6f2e9cb516ec90965fb", size = 1977238, upload-time = "2025-10-06T21:11:14.1Z" }, + { url = "https://files.pythonhosted.org/packages/04/04/28b040e88c1b89d851278478842f0bdf39c7a05da9e850333c6c8cbe7dfa/pydantic_core-2.41.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:440d0df7415b50084a4ba9d870480c16c5f67c0d1d4d5119e3f70925533a0edc", size = 1875626, upload-time = "2025-10-06T21:11:15.69Z" }, + { url = "https://files.pythonhosted.org/packages/d6/58/b41dd3087505220bb58bc81be8c3e8cbc037f5710cd3c838f44f90bdd704/pydantic_core-2.41.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:71eaa38d342099405dae6484216dcf1e8e4b0bebd9b44a4e08c9b43db6a2ab67", size = 2045708, upload-time = "2025-10-06T21:11:17.258Z" }, + { url = "https://files.pythonhosted.org/packages/d7/b8/760f23754e40bf6c65b94a69b22c394c24058a0ef7e2aa471d2e39219c1a/pydantic_core-2.41.1-cp313-cp313t-win_amd64.whl", hash = "sha256:555ecf7e50f1161d3f693bc49f23c82cf6cdeafc71fa37a06120772a09a38795", size = 1997171, upload-time = "2025-10-06T21:11:18.822Z" }, + { url = "https://files.pythonhosted.org/packages/41/12/cec246429ddfa2778d2d6301eca5362194dc8749ecb19e621f2f65b5090f/pydantic_core-2.41.1-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:05226894a26f6f27e1deb735d7308f74ef5fa3a6de3e0135bb66cdcaee88f64b", size = 2107836, upload-time = "2025-10-06T21:11:20.432Z" }, + { url = "https://files.pythonhosted.org/packages/20/39/baba47f8d8b87081302498e610aefc37142ce6a1cc98b2ab6b931a162562/pydantic_core-2.41.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:85ff7911c6c3e2fd8d3779c50925f6406d770ea58ea6dde9c230d35b52b16b4a", size = 1904449, upload-time = "2025-10-06T21:11:22.185Z" }, + { url = 
"https://files.pythonhosted.org/packages/50/32/9a3d87cae2c75a5178334b10358d631bd094b916a00a5993382222dbfd92/pydantic_core-2.41.1-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:47f1f642a205687d59b52dc1a9a607f45e588f5a2e9eeae05edd80c7a8c47674", size = 1961750, upload-time = "2025-10-06T21:11:24.348Z" }, + { url = "https://files.pythonhosted.org/packages/27/42/a96c9d793a04cf2a9773bff98003bb154087b94f5530a2ce6063ecfec583/pydantic_core-2.41.1-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:df11c24e138876ace5ec6043e5cae925e34cf38af1a1b3d63589e8f7b5f5cdc4", size = 2063305, upload-time = "2025-10-06T21:11:26.556Z" }, + { url = "https://files.pythonhosted.org/packages/3e/8d/028c4b7d157a005b1f52c086e2d4b0067886b213c86220c1153398dbdf8f/pydantic_core-2.41.1-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7f0bf7f5c8f7bf345c527e8a0d72d6b26eda99c1227b0c34e7e59e181260de31", size = 2228959, upload-time = "2025-10-06T21:11:28.426Z" }, + { url = "https://files.pythonhosted.org/packages/08/f7/ee64cda8fcc9ca3f4716e6357144f9ee71166775df582a1b6b738bf6da57/pydantic_core-2.41.1-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:82b887a711d341c2c47352375d73b029418f55b20bd7815446d175a70effa706", size = 2345421, upload-time = "2025-10-06T21:11:30.226Z" }, + { url = "https://files.pythonhosted.org/packages/13/c0/e8ec05f0f5ee7a3656973ad9cd3bc73204af99f6512c1a4562f6fb4b3f7d/pydantic_core-2.41.1-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b5f1d5d6bbba484bdf220c72d8ecd0be460f4bd4c5e534a541bb2cd57589fb8b", size = 2065288, upload-time = "2025-10-06T21:11:32.019Z" }, + { url = "https://files.pythonhosted.org/packages/0a/25/d77a73ff24e2e4fcea64472f5e39b0402d836da9b08b5361a734d0153023/pydantic_core-2.41.1-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2bf1917385ebe0f968dc5c6ab1375886d56992b93ddfe6bf52bff575d03662be", size = 2189759, 
upload-time = "2025-10-06T21:11:33.753Z" }, + { url = "https://files.pythonhosted.org/packages/66/45/4a4ebaaae12a740552278d06fe71418c0f2869537a369a89c0e6723b341d/pydantic_core-2.41.1-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:4f94f3ab188f44b9a73f7295663f3ecb8f2e2dd03a69c8f2ead50d37785ecb04", size = 2140747, upload-time = "2025-10-06T21:11:35.781Z" }, + { url = "https://files.pythonhosted.org/packages/da/6d/b727ce1022f143194a36593243ff244ed5a1eb3c9122296bf7e716aa37ba/pydantic_core-2.41.1-cp314-cp314-musllinux_1_1_armv7l.whl", hash = "sha256:3925446673641d37c30bd84a9d597e49f72eacee8b43322c8999fa17d5ae5bc4", size = 2327416, upload-time = "2025-10-06T21:11:37.75Z" }, + { url = "https://files.pythonhosted.org/packages/6f/8c/02df9d8506c427787059f87c6c7253435c6895e12472a652d9616ee0fc95/pydantic_core-2.41.1-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:49bd51cc27adb980c7b97357ae036ce9b3c4d0bb406e84fbe16fb2d368b602a8", size = 2318138, upload-time = "2025-10-06T21:11:39.463Z" }, + { url = "https://files.pythonhosted.org/packages/98/67/0cf429a7d6802536941f430e6e3243f6d4b68f41eeea4b242372f1901794/pydantic_core-2.41.1-cp314-cp314-win32.whl", hash = "sha256:a31ca0cd0e4d12ea0df0077df2d487fc3eb9d7f96bbb13c3c5b88dcc21d05159", size = 1998429, upload-time = "2025-10-06T21:11:41.989Z" }, + { url = "https://files.pythonhosted.org/packages/38/60/742fef93de5d085022d2302a6317a2b34dbfe15258e9396a535c8a100ae7/pydantic_core-2.41.1-cp314-cp314-win_amd64.whl", hash = "sha256:1b5c4374a152e10a22175d7790e644fbd8ff58418890e07e2073ff9d4414efae", size = 2028870, upload-time = "2025-10-06T21:11:43.66Z" }, + { url = "https://files.pythonhosted.org/packages/31/38/cdd8ccb8555ef7720bd7715899bd6cfbe3c29198332710e1b61b8f5dd8b8/pydantic_core-2.41.1-cp314-cp314-win_arm64.whl", hash = "sha256:4fee76d757639b493eb600fba668f1e17475af34c17dd61db7a47e824d464ca9", size = 1974275, upload-time = "2025-10-06T21:11:45.476Z" }, + { url = 
"https://files.pythonhosted.org/packages/e7/7e/8ac10ccb047dc0221aa2530ec3c7c05ab4656d4d4bd984ee85da7f3d5525/pydantic_core-2.41.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:f9b9c968cfe5cd576fdd7361f47f27adeb120517e637d1b189eea1c3ece573f4", size = 1875124, upload-time = "2025-10-06T21:11:47.591Z" }, + { url = "https://files.pythonhosted.org/packages/c3/e4/7d9791efeb9c7d97e7268f8d20e0da24d03438a7fa7163ab58f1073ba968/pydantic_core-2.41.1-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f1ebc7ab67b856384aba09ed74e3e977dded40e693de18a4f197c67d0d4e6d8e", size = 2043075, upload-time = "2025-10-06T21:11:49.542Z" }, + { url = "https://files.pythonhosted.org/packages/2d/c3/3f6e6b2342ac11ac8cd5cb56e24c7b14afa27c010e82a765ffa5f771884a/pydantic_core-2.41.1-cp314-cp314t-win_amd64.whl", hash = "sha256:8ae0dc57b62a762985bc7fbf636be3412394acc0ddb4ade07fe104230f1b9762", size = 1995341, upload-time = "2025-10-06T21:11:51.497Z" }, + { url = "https://files.pythonhosted.org/packages/16/89/d0afad37ba25f5801735af1472e650b86baad9fe807a42076508e4824a2a/pydantic_core-2.41.1-graalpy311-graalpy242_311_native-macosx_10_12_x86_64.whl", hash = "sha256:68f2251559b8efa99041bb63571ec7cdd2d715ba74cc82b3bc9eff824ebc8bf0", size = 2124001, upload-time = "2025-10-07T10:49:54.369Z" }, + { url = "https://files.pythonhosted.org/packages/8e/c4/08609134b34520568ddebb084d9ed0a2a3f5f52b45739e6e22cb3a7112eb/pydantic_core-2.41.1-graalpy311-graalpy242_311_native-macosx_11_0_arm64.whl", hash = "sha256:c7bc140c596097cb53b30546ca257dbe3f19282283190b1b5142928e5d5d3a20", size = 1941841, upload-time = "2025-10-07T10:49:56.248Z" }, + { url = "https://files.pythonhosted.org/packages/2a/43/94a4877094e5fe19a3f37e7e817772263e2c573c94f1e3fa2b1eee56ef3b/pydantic_core-2.41.1-graalpy311-graalpy242_311_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2896510fce8f4725ec518f8b9d7f015a00db249d2fd40788f442af303480063d", size = 1961129, upload-time = 
"2025-10-07T10:49:58.298Z" }, + { url = "https://files.pythonhosted.org/packages/a2/30/23a224d7e25260eb5f69783a63667453037e07eb91ff0e62dabaadd47128/pydantic_core-2.41.1-graalpy311-graalpy242_311_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ced20e62cfa0f496ba68fa5d6c7ee71114ea67e2a5da3114d6450d7f4683572a", size = 2148770, upload-time = "2025-10-07T10:49:59.959Z" }, + { url = "https://files.pythonhosted.org/packages/2b/3e/a51c5f5d37b9288ba30683d6e96f10fa8f1defad1623ff09f1020973b577/pydantic_core-2.41.1-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:b04fa9ed049461a7398138c604b00550bc89e3e1151d84b81ad6dc93e39c4c06", size = 2115344, upload-time = "2025-10-07T10:50:02.466Z" }, + { url = "https://files.pythonhosted.org/packages/5a/bd/389504c9e0600ef4502cd5238396b527afe6ef8981a6a15cd1814fc7b434/pydantic_core-2.41.1-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:b3b7d9cfbfdc43c80a16638c6dc2768e3956e73031fca64e8e1a3ae744d1faeb", size = 1927994, upload-time = "2025-10-07T10:50:04.379Z" }, + { url = "https://files.pythonhosted.org/packages/ff/9c/5111c6b128861cb792a4c082677e90dac4f2e090bb2e2fe06aa5b2d39027/pydantic_core-2.41.1-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eec83fc6abef04c7f9bec616e2d76ee9a6a4ae2a359b10c21d0f680e24a247ca", size = 1959394, upload-time = "2025-10-07T10:50:06.335Z" }, + { url = "https://files.pythonhosted.org/packages/14/3f/cfec8b9a0c48ce5d64409ec5e1903cb0b7363da38f14b41de2fcb3712700/pydantic_core-2.41.1-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6771a2d9f83c4038dfad5970a3eef215940682b2175e32bcc817bdc639019b28", size = 2147365, upload-time = "2025-10-07T10:50:07.978Z" }, + { url = "https://files.pythonhosted.org/packages/d4/31/f403d7ca8352e3e4df352ccacd200f5f7f7fe81cef8e458515f015091625/pydantic_core-2.41.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = 
"sha256:fabcbdb12de6eada8d6e9a759097adb3c15440fafc675b3e94ae5c9cb8d678a0", size = 2114268, upload-time = "2025-10-07T10:50:10.257Z" }, + { url = "https://files.pythonhosted.org/packages/6e/b5/334473b6d2810df84db67f03d4f666acacfc538512c2d2a254074fee0889/pydantic_core-2.41.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:80e97ccfaf0aaf67d55de5085b0ed0d994f57747d9d03f2de5cc9847ca737b08", size = 1935786, upload-time = "2025-10-07T10:50:12.333Z" }, + { url = "https://files.pythonhosted.org/packages/ea/5e/45513e4dc621f47397cfa5fef12ba8fa5e8b1c4c07f2ff2a5fef8ff81b25/pydantic_core-2.41.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:34df1fe8fea5d332484a763702e8b6a54048a9d4fe6ccf41e34a128238e01f52", size = 1971995, upload-time = "2025-10-07T10:50:14.071Z" }, + { url = "https://files.pythonhosted.org/packages/22/e3/f1797c168e5f52b973bed1c585e99827a22d5e579d1ed57d51bc15b14633/pydantic_core-2.41.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:421b5595f845842fc093f7250e24ee395f54ca62d494fdde96f43ecf9228ae01", size = 2191264, upload-time = "2025-10-07T10:50:15.788Z" }, + { url = "https://files.pythonhosted.org/packages/bb/e1/24ef4c3b4ab91c21c3a09a966c7d2cffe101058a7bfe5cc8b2c7c7d574e2/pydantic_core-2.41.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:dce8b22663c134583aaad24827863306a933f576c79da450be3984924e2031d1", size = 2152430, upload-time = "2025-10-07T10:50:18.018Z" }, + { url = "https://files.pythonhosted.org/packages/35/74/70c1e225d67f7ef3fdba02c506d9011efaf734020914920b2aa3d1a45e61/pydantic_core-2.41.1-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:300a9c162fea9906cc5c103893ca2602afd84f0ec90d3be36f4cc360125d22e1", size = 2324691, upload-time = "2025-10-07T10:50:19.801Z" }, + { url = "https://files.pythonhosted.org/packages/c8/bf/dd4d21037c8bef0d8cce90a86a3f2dcb011c30086db2a10113c3eea23eba/pydantic_core-2.41.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = 
"sha256:e019167628f6e6161ae7ab9fb70f6d076a0bf0d55aa9b20833f86a320c70dd65", size = 2324493, upload-time = "2025-10-07T10:50:21.568Z" }, + { url = "https://files.pythonhosted.org/packages/7e/78/3093b334e9c9796c8236a4701cd2ddef1c56fb0928fe282a10c797644380/pydantic_core-2.41.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:13ab9cc2de6f9d4ab645a050ae5aee61a2424ac4d3a16ba23d4c2027705e0301", size = 2146156, upload-time = "2025-10-07T10:50:23.475Z" }, + { url = "https://files.pythonhosted.org/packages/e6/6c/fa3e45c2b054a1e627a89a364917f12cbe3abc3e91b9004edaae16e7b3c5/pydantic_core-2.41.1-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:af2385d3f98243fb733862f806c5bb9122e5fba05b373e3af40e3c82d711cef1", size = 2112094, upload-time = "2025-10-07T10:50:25.513Z" }, + { url = "https://files.pythonhosted.org/packages/e5/17/7eebc38b4658cc8e6902d0befc26388e4c2a5f2e179c561eeb43e1922c7b/pydantic_core-2.41.1-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:6550617a0c2115be56f90c31a5370261d8ce9dbf051c3ed53b51172dd34da696", size = 1935300, upload-time = "2025-10-07T10:50:27.715Z" }, + { url = "https://files.pythonhosted.org/packages/2b/00/9fe640194a1717a464ab861d43595c268830f98cb1e2705aa134b3544b70/pydantic_core-2.41.1-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dc17b6ecf4983d298686014c92ebc955a9f9baf9f57dad4065e7906e7bee6222", size = 1970417, upload-time = "2025-10-07T10:50:29.573Z" }, + { url = "https://files.pythonhosted.org/packages/b2/ad/f4cdfaf483b78ee65362363e73b6b40c48e067078d7b146e8816d5945ad6/pydantic_core-2.41.1-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:42ae9352cf211f08b04ea110563d6b1e415878eea5b4c70f6bdb17dca3b932d2", size = 2190745, upload-time = "2025-10-07T10:50:31.48Z" }, + { url = "https://files.pythonhosted.org/packages/cb/c1/18f416d40a10f44e9387497ba449f40fdb1478c61ba05c4b6bdb82300362/pydantic_core-2.41.1-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = 
"sha256:e82947de92068b0a21681a13dd2102387197092fbe7defcfb8453e0913866506", size = 2150888, upload-time = "2025-10-07T10:50:33.477Z" }, + { url = "https://files.pythonhosted.org/packages/42/30/134c8a921630d8a88d6f905a562495a6421e959a23c19b0f49b660801d67/pydantic_core-2.41.1-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:e244c37d5471c9acdcd282890c6c4c83747b77238bfa19429b8473586c907656", size = 2324489, upload-time = "2025-10-07T10:50:36.48Z" }, + { url = "https://files.pythonhosted.org/packages/9c/48/a9263aeaebdec81e941198525b43edb3b44f27cfa4cb8005b8d3eb8dec72/pydantic_core-2.41.1-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:1e798b4b304a995110d41ec93653e57975620ccb2842ba9420037985e7d7284e", size = 2322763, upload-time = "2025-10-07T10:50:38.751Z" }, + { url = "https://files.pythonhosted.org/packages/1d/62/755d2bd2593f701c5839fc084e9c2c5e2418f460383ad04e3b5d0befc3ca/pydantic_core-2.41.1-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:f1fc716c0eb1663c59699b024428ad5ec2bcc6b928527b8fe28de6cb89f47efb", size = 2144046, upload-time = "2025-10-07T10:50:40.686Z" }, +] + +[[package]] +name = "pygments" +version = "2.19.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b0/77/a5b8c569bf593b0140bde72ea885a803b82086995367bf2037de0159d924/pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887", size = 4968631, upload-time = "2025-06-21T13:39:12.283Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b", size = 1225217, upload-time = "2025-06-21T13:39:07.939Z" }, +] + +[[package]] +name = "pyparsing" +version = "3.2.5" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/f2/a5/181488fc2b9d093e3972d2a472855aae8a03f000592dbfce716a512b3359/pyparsing-3.2.5.tar.gz", hash = "sha256:2df8d5b7b2802ef88e8d016a2eb9c7aeaa923529cd251ed0fe4608275d4105b6", size = 1099274, upload-time = "2025-09-21T04:11:06.277Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/10/5e/1aa9a93198c6b64513c9d7752de7422c06402de6600a8767da1524f9570b/pyparsing-3.2.5-py3-none-any.whl", hash = "sha256:e38a4f02064cf41fe6593d328d0512495ad1f3d8a91c4f73fc401b3079a59a5e", size = 113890, upload-time = "2025-09-21T04:11:04.117Z" }, +] + +[[package]] +name = "pyright" +version = "1.1.406" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "nodeenv" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/f7/16/6b4fbdd1fef59a0292cbb99f790b44983e390321eccbc5921b4d161da5d1/pyright-1.1.406.tar.gz", hash = "sha256:c4872bc58c9643dac09e8a2e74d472c62036910b3bd37a32813989ef7576ea2c", size = 4113151, upload-time = "2025-10-02T01:04:45.488Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f6/a2/e309afbb459f50507103793aaef85ca4348b66814c86bc73908bdeb66d12/pyright-1.1.406-py3-none-any.whl", hash = "sha256:1d81fb43c2407bf566e97e57abb01c811973fdb21b2df8df59f870f688bdca71", size = 5980982, upload-time = "2025-10-02T01:04:43.137Z" }, +] + +[[package]] +name = "pytest" +version = "8.4.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, + { name = "exceptiongroup", marker = "python_full_version < '3.11'" }, + { name = "iniconfig" }, + { name = "packaging" }, + { name = "pluggy" }, + { name = "pygments" }, + { name = "tomli", marker = "python_full_version < '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/a3/5c/00a0e072241553e1a7496d638deababa67c5058571567b92a7eaa258397c/pytest-8.4.2.tar.gz", hash = 
"sha256:86c0d0b93306b961d58d62a4db4879f27fe25513d4b969df351abdddb3c30e01", size = 1519618, upload-time = "2025-09-04T14:34:22.711Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a8/a4/20da314d277121d6534b3a980b29035dcd51e6744bd79075a6ce8fa4eb8d/pytest-8.4.2-py3-none-any.whl", hash = "sha256:872f880de3fc3a5bdc88a11b39c9710c3497a547cfa9320bc3c5e62fbf272e79", size = 365750, upload-time = "2025-09-04T14:34:20.226Z" }, +] + +[[package]] +name = "pytest-asyncio" +version = "1.2.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "backports-asyncio-runner", marker = "python_full_version < '3.11'" }, + { name = "pytest" }, + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/42/86/9e3c5f48f7b7b638b216e4b9e645f54d199d7abbbab7a64a13b4e12ba10f/pytest_asyncio-1.2.0.tar.gz", hash = "sha256:c609a64a2a8768462d0c99811ddb8bd2583c33fd33cf7f21af1c142e824ffb57", size = 50119, upload-time = "2025-09-12T07:33:53.816Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/04/93/2fa34714b7a4ae72f2f8dad66ba17dd9a2c793220719e736dda28b7aec27/pytest_asyncio-1.2.0-py3-none-any.whl", hash = "sha256:8e17ae5e46d8e7efe51ab6494dd2010f4ca8dae51652aa3c8d55acf50bfb2e99", size = 15095, upload-time = "2025-09-12T07:33:52.639Z" }, +] + +[[package]] +name = "pytest-cov" +version = "7.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "coverage", extra = ["toml"] }, + { name = "pluggy" }, + { name = "pytest" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/5e/f7/c933acc76f5208b3b00089573cf6a2bc26dc80a8aece8f52bb7d6b1855ca/pytest_cov-7.0.0.tar.gz", hash = "sha256:33c97eda2e049a0c5298e91f519302a1334c26ac65c1a483d6206fd458361af1", size = 54328, upload-time = "2025-09-09T10:57:02.113Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/ee/49/1377b49de7d0c1ce41292161ea0f721913fa8722c19fb9c1e3aa0367eecb/pytest_cov-7.0.0-py3-none-any.whl", hash = "sha256:3b8e9558b16cc1479da72058bdecf8073661c7f57f7d3c5f22a1c23507f2d861", size = 22424, upload-time = "2025-09-09T10:57:00.695Z" }, +] + +[[package]] +name = "pytest-dotenv" +version = "0.5.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pytest" }, + { name = "python-dotenv" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/cd/b0/cafee9c627c1bae228eb07c9977f679b3a7cb111b488307ab9594ba9e4da/pytest-dotenv-0.5.2.tar.gz", hash = "sha256:2dc6c3ac6d8764c71c6d2804e902d0ff810fa19692e95fe138aefc9b1aa73732", size = 3782, upload-time = "2020-06-16T12:38:03.4Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d0/da/9da67c67b3d0963160e3d2cbc7c38b6fae342670cc8e6d5936644b2cf944/pytest_dotenv-0.5.2-py3-none-any.whl", hash = "sha256:40a2cece120a213898afaa5407673f6bd924b1fa7eafce6bda0e8abffe2f710f", size = 3993, upload-time = "2020-06-16T12:38:01.139Z" }, +] + +[[package]] +name = "pytest-emoji" +version = "0.2.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pytest" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/88/4d/d489f939f0717a034cea7955d36bc2a7a5ba1b263871e63ad8cb16d47555/pytest-emoji-0.2.0.tar.gz", hash = "sha256:e1bd4790d87649c2d09c272c88bdfc4d37c1cc7c7a46583087d7c510944571e8", size = 6171, upload-time = "2019-02-19T09:33:17.454Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f7/51/80af966c0aded877da7577d21c4601ca98c6f603c6e6073ddea071af01ec/pytest_emoji-0.2.0-py3-none-any.whl", hash = "sha256:6e34ed21970fa4b80a56ad11417456bd873eb066c02315fe9df0fafe6d4d4436", size = 5664, upload-time = "2019-02-19T09:33:15.771Z" }, +] + +[[package]] +name = "pytest-md" +version = "0.2.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pytest" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/0e/55/1d4248f08a97255abb23b05d8ba07586333194fadb17beda96b707aebecd/pytest-md-0.2.0.tar.gz", hash = "sha256:3b248d5b360ea5198e05b4f49c7442234812809a63137ec6cdd3643a40cf0112", size = 5985, upload-time = "2019-07-11T08:15:59.687Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/80/71/23d03f57c18116c6770141478e33b3500c4e92500cf4b49a396e9226733f/pytest_md-0.2.0-py3-none-any.whl", hash = "sha256:4c4cd16fea6d1485e87ee254558712c804a96d2aa9674b780e7eb8fb6526e1d1", size = 6117, upload-time = "2019-07-11T08:15:57.829Z" }, +] + +[[package]] +name = "pytest-rerunfailures" +version = "16.0.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "packaging" }, + { name = "pytest" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/26/53/a543a76f922a5337d10df22441af8bf68f1b421cadf9aedf8a77943b81f6/pytest_rerunfailures-16.0.1.tar.gz", hash = "sha256:ed4b3a6e7badb0a720ddd93f9de1e124ba99a0cb13bc88561b3c168c16062559", size = 27612, upload-time = "2025-09-02T06:48:25.193Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/38/73/67dc14cda1942914e70fbb117fceaf11e259362c517bdadd76b0dd752524/pytest_rerunfailures-16.0.1-py3-none-any.whl", hash = "sha256:0bccc0e3b0e3388275c25a100f7077081318196569a121217688ed05e58984b9", size = 13610, upload-time = "2025-09-02T06:48:23.615Z" }, +] + +[[package]] +name = "pytest-xdist" +version = "3.8.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "execnet" }, + { name = "pytest" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/78/b4/439b179d1ff526791eb921115fca8e44e596a13efeda518b9d845a619450/pytest_xdist-3.8.0.tar.gz", hash = "sha256:7e578125ec9bc6050861aa93f2d59f1d8d085595d6551c2c90b6f4fad8d3a9f1", size = 88069, upload-time = "2025-07-01T13:30:59.346Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/ca/31/d4e37e9e550c2b92a9cbc2e4d0b7420a27224968580b5a447f420847c975/pytest_xdist-3.8.0-py3-none-any.whl", hash = "sha256:202ca578cfeb7370784a8c33d6d05bc6e13b4f25b5053c30a152269fd10f0b88", size = 46396, upload-time = "2025-07-01T13:30:56.632Z" }, +] + +[[package]] +name = "python-dotenv" +version = "1.1.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f6/b0/4bc07ccd3572a2f9df7e6782f52b0c6c90dcbb803ac4a167702d7d0dfe1e/python_dotenv-1.1.1.tar.gz", hash = "sha256:a8a6399716257f45be6a007360200409fce5cda2661e3dec71d23dc15f6189ab", size = 41978, upload-time = "2025-06-24T04:21:07.341Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5f/ed/539768cf28c661b5b068d66d96a2f155c4971a5d55684a514c1a0e0dec2f/python_dotenv-1.1.1-py3-none-any.whl", hash = "sha256:31f23644fe2602f88ff55e1f5c79ba497e01224ee7737937930c448e4d0e24dc", size = 20556, upload-time = "2025-06-24T04:21:06.073Z" }, +] + +[[package]] +name = "pyyaml" +version = "6.0.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/05/8e/961c0007c59b8dd7729d542c61a4d537767a59645b82a0b521206e1e25c2/pyyaml-6.0.3.tar.gz", hash = "sha256:d76623373421df22fb4cf8817020cbb7ef15c725b9d5e45f17e189bfc384190f", size = 130960, upload-time = "2025-09-25T21:33:16.546Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f4/a0/39350dd17dd6d6c6507025c0e53aef67a9293a6d37d3511f23ea510d5800/pyyaml-6.0.3-cp310-cp310-macosx_10_13_x86_64.whl", hash = "sha256:214ed4befebe12df36bcc8bc2b64b396ca31be9304b8f59e25c11cf94a4c033b", size = 184227, upload-time = "2025-09-25T21:31:46.04Z" }, + { url = "https://files.pythonhosted.org/packages/05/14/52d505b5c59ce73244f59c7a50ecf47093ce4765f116cdb98286a71eeca2/pyyaml-6.0.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:02ea2dfa234451bbb8772601d7b8e426c2bfa197136796224e50e35a78777956", size = 174019, upload-time = 
"2025-09-25T21:31:47.706Z" }, + { url = "https://files.pythonhosted.org/packages/43/f7/0e6a5ae5599c838c696adb4e6330a59f463265bfa1e116cfd1fbb0abaaae/pyyaml-6.0.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b30236e45cf30d2b8e7b3e85881719e98507abed1011bf463a8fa23e9c3e98a8", size = 740646, upload-time = "2025-09-25T21:31:49.21Z" }, + { url = "https://files.pythonhosted.org/packages/2f/3a/61b9db1d28f00f8fd0ae760459a5c4bf1b941baf714e207b6eb0657d2578/pyyaml-6.0.3-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:66291b10affd76d76f54fad28e22e51719ef9ba22b29e1d7d03d6777a9174198", size = 840793, upload-time = "2025-09-25T21:31:50.735Z" }, + { url = "https://files.pythonhosted.org/packages/7a/1e/7acc4f0e74c4b3d9531e24739e0ab832a5edf40e64fbae1a9c01941cabd7/pyyaml-6.0.3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9c7708761fccb9397fe64bbc0395abcae8c4bf7b0eac081e12b809bf47700d0b", size = 770293, upload-time = "2025-09-25T21:31:51.828Z" }, + { url = "https://files.pythonhosted.org/packages/8b/ef/abd085f06853af0cd59fa5f913d61a8eab65d7639ff2a658d18a25d6a89d/pyyaml-6.0.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:418cf3f2111bc80e0933b2cd8cd04f286338bb88bdc7bc8e6dd775ebde60b5e0", size = 732872, upload-time = "2025-09-25T21:31:53.282Z" }, + { url = "https://files.pythonhosted.org/packages/1f/15/2bc9c8faf6450a8b3c9fc5448ed869c599c0a74ba2669772b1f3a0040180/pyyaml-6.0.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:5e0b74767e5f8c593e8c9b5912019159ed0533c70051e9cce3e8b6aa699fcd69", size = 758828, upload-time = "2025-09-25T21:31:54.807Z" }, + { url = "https://files.pythonhosted.org/packages/a3/00/531e92e88c00f4333ce359e50c19b8d1de9fe8d581b1534e35ccfbc5f393/pyyaml-6.0.3-cp310-cp310-win32.whl", hash = "sha256:28c8d926f98f432f88adc23edf2e6d4921ac26fb084b028c733d01868d19007e", size = 142415, upload-time = 
"2025-09-25T21:31:55.885Z" }, + { url = "https://files.pythonhosted.org/packages/2a/fa/926c003379b19fca39dd4634818b00dec6c62d87faf628d1394e137354d4/pyyaml-6.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:bdb2c67c6c1390b63c6ff89f210c8fd09d9a1217a465701eac7316313c915e4c", size = 158561, upload-time = "2025-09-25T21:31:57.406Z" }, + { url = "https://files.pythonhosted.org/packages/6d/16/a95b6757765b7b031c9374925bb718d55e0a9ba8a1b6a12d25962ea44347/pyyaml-6.0.3-cp311-cp311-macosx_10_13_x86_64.whl", hash = "sha256:44edc647873928551a01e7a563d7452ccdebee747728c1080d881d68af7b997e", size = 185826, upload-time = "2025-09-25T21:31:58.655Z" }, + { url = "https://files.pythonhosted.org/packages/16/19/13de8e4377ed53079ee996e1ab0a9c33ec2faf808a4647b7b4c0d46dd239/pyyaml-6.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:652cb6edd41e718550aad172851962662ff2681490a8a711af6a4d288dd96824", size = 175577, upload-time = "2025-09-25T21:32:00.088Z" }, + { url = "https://files.pythonhosted.org/packages/0c/62/d2eb46264d4b157dae1275b573017abec435397aa59cbcdab6fc978a8af4/pyyaml-6.0.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:10892704fc220243f5305762e276552a0395f7beb4dbf9b14ec8fd43b57f126c", size = 775556, upload-time = "2025-09-25T21:32:01.31Z" }, + { url = "https://files.pythonhosted.org/packages/10/cb/16c3f2cf3266edd25aaa00d6c4350381c8b012ed6f5276675b9eba8d9ff4/pyyaml-6.0.3-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:850774a7879607d3a6f50d36d04f00ee69e7fc816450e5f7e58d7f17f1ae5c00", size = 882114, upload-time = "2025-09-25T21:32:03.376Z" }, + { url = "https://files.pythonhosted.org/packages/71/60/917329f640924b18ff085ab889a11c763e0b573da888e8404ff486657602/pyyaml-6.0.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b8bb0864c5a28024fac8a632c443c87c5aa6f215c0b126c449ae1a150412f31d", size = 806638, upload-time = 
"2025-09-25T21:32:04.553Z" }, + { url = "https://files.pythonhosted.org/packages/dd/6f/529b0f316a9fd167281a6c3826b5583e6192dba792dd55e3203d3f8e655a/pyyaml-6.0.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1d37d57ad971609cf3c53ba6a7e365e40660e3be0e5175fa9f2365a379d6095a", size = 767463, upload-time = "2025-09-25T21:32:06.152Z" }, + { url = "https://files.pythonhosted.org/packages/f2/6a/b627b4e0c1dd03718543519ffb2f1deea4a1e6d42fbab8021936a4d22589/pyyaml-6.0.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:37503bfbfc9d2c40b344d06b2199cf0e96e97957ab1c1b546fd4f87e53e5d3e4", size = 794986, upload-time = "2025-09-25T21:32:07.367Z" }, + { url = "https://files.pythonhosted.org/packages/45/91/47a6e1c42d9ee337c4839208f30d9f09caa9f720ec7582917b264defc875/pyyaml-6.0.3-cp311-cp311-win32.whl", hash = "sha256:8098f252adfa6c80ab48096053f512f2321f0b998f98150cea9bd23d83e1467b", size = 142543, upload-time = "2025-09-25T21:32:08.95Z" }, + { url = "https://files.pythonhosted.org/packages/da/e3/ea007450a105ae919a72393cb06f122f288ef60bba2dc64b26e2646fa315/pyyaml-6.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:9f3bfb4965eb874431221a3ff3fdcddc7e74e3b07799e0e84ca4a0f867d449bf", size = 158763, upload-time = "2025-09-25T21:32:09.96Z" }, + { url = "https://files.pythonhosted.org/packages/d1/33/422b98d2195232ca1826284a76852ad5a86fe23e31b009c9886b2d0fb8b2/pyyaml-6.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7f047e29dcae44602496db43be01ad42fc6f1cc0d8cd6c83d342306c32270196", size = 182063, upload-time = "2025-09-25T21:32:11.445Z" }, + { url = "https://files.pythonhosted.org/packages/89/a0/6cf41a19a1f2f3feab0e9c0b74134aa2ce6849093d5517a0c550fe37a648/pyyaml-6.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:fc09d0aa354569bc501d4e787133afc08552722d3ab34836a80547331bb5d4a0", size = 173973, upload-time = "2025-09-25T21:32:12.492Z" }, + { url = 
"https://files.pythonhosted.org/packages/ed/23/7a778b6bd0b9a8039df8b1b1d80e2e2ad78aa04171592c8a5c43a56a6af4/pyyaml-6.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9149cad251584d5fb4981be1ecde53a1ca46c891a79788c0df828d2f166bda28", size = 775116, upload-time = "2025-09-25T21:32:13.652Z" }, + { url = "https://files.pythonhosted.org/packages/65/30/d7353c338e12baef4ecc1b09e877c1970bd3382789c159b4f89d6a70dc09/pyyaml-6.0.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5fdec68f91a0c6739b380c83b951e2c72ac0197ace422360e6d5a959d8d97b2c", size = 844011, upload-time = "2025-09-25T21:32:15.21Z" }, + { url = "https://files.pythonhosted.org/packages/8b/9d/b3589d3877982d4f2329302ef98a8026e7f4443c765c46cfecc8858c6b4b/pyyaml-6.0.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ba1cc08a7ccde2d2ec775841541641e4548226580ab850948cbfda66a1befcdc", size = 807870, upload-time = "2025-09-25T21:32:16.431Z" }, + { url = "https://files.pythonhosted.org/packages/05/c0/b3be26a015601b822b97d9149ff8cb5ead58c66f981e04fedf4e762f4bd4/pyyaml-6.0.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8dc52c23056b9ddd46818a57b78404882310fb473d63f17b07d5c40421e47f8e", size = 761089, upload-time = "2025-09-25T21:32:17.56Z" }, + { url = "https://files.pythonhosted.org/packages/be/8e/98435a21d1d4b46590d5459a22d88128103f8da4c2d4cb8f14f2a96504e1/pyyaml-6.0.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:41715c910c881bc081f1e8872880d3c650acf13dfa8214bad49ed4cede7c34ea", size = 790181, upload-time = "2025-09-25T21:32:18.834Z" }, + { url = "https://files.pythonhosted.org/packages/74/93/7baea19427dcfbe1e5a372d81473250b379f04b1bd3c4c5ff825e2327202/pyyaml-6.0.3-cp312-cp312-win32.whl", hash = "sha256:96b533f0e99f6579b3d4d4995707cf36df9100d67e0c8303a0c55b27b5f99bc5", size = 137658, upload-time = "2025-09-25T21:32:20.209Z" }, + { url = 
"https://files.pythonhosted.org/packages/86/bf/899e81e4cce32febab4fb42bb97dcdf66bc135272882d1987881a4b519e9/pyyaml-6.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:5fcd34e47f6e0b794d17de1b4ff496c00986e1c83f7ab2fb8fcfe9616ff7477b", size = 154003, upload-time = "2025-09-25T21:32:21.167Z" }, + { url = "https://files.pythonhosted.org/packages/1a/08/67bd04656199bbb51dbed1439b7f27601dfb576fb864099c7ef0c3e55531/pyyaml-6.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:64386e5e707d03a7e172c0701abfb7e10f0fb753ee1d773128192742712a98fd", size = 140344, upload-time = "2025-09-25T21:32:22.617Z" }, + { url = "https://files.pythonhosted.org/packages/d1/11/0fd08f8192109f7169db964b5707a2f1e8b745d4e239b784a5a1dd80d1db/pyyaml-6.0.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8da9669d359f02c0b91ccc01cac4a67f16afec0dac22c2ad09f46bee0697eba8", size = 181669, upload-time = "2025-09-25T21:32:23.673Z" }, + { url = "https://files.pythonhosted.org/packages/b1/16/95309993f1d3748cd644e02e38b75d50cbc0d9561d21f390a76242ce073f/pyyaml-6.0.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:2283a07e2c21a2aa78d9c4442724ec1eb15f5e42a723b99cb3d822d48f5f7ad1", size = 173252, upload-time = "2025-09-25T21:32:25.149Z" }, + { url = "https://files.pythonhosted.org/packages/50/31/b20f376d3f810b9b2371e72ef5adb33879b25edb7a6d072cb7ca0c486398/pyyaml-6.0.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ee2922902c45ae8ccada2c5b501ab86c36525b883eff4255313a253a3160861c", size = 767081, upload-time = "2025-09-25T21:32:26.575Z" }, + { url = "https://files.pythonhosted.org/packages/49/1e/a55ca81e949270d5d4432fbbd19dfea5321eda7c41a849d443dc92fd1ff7/pyyaml-6.0.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a33284e20b78bd4a18c8c2282d549d10bc8408a2a7ff57653c0cf0b9be0afce5", size = 841159, upload-time = "2025-09-25T21:32:27.727Z" }, + { url = 
"https://files.pythonhosted.org/packages/74/27/e5b8f34d02d9995b80abcef563ea1f8b56d20134d8f4e5e81733b1feceb2/pyyaml-6.0.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0f29edc409a6392443abf94b9cf89ce99889a1dd5376d94316ae5145dfedd5d6", size = 801626, upload-time = "2025-09-25T21:32:28.878Z" }, + { url = "https://files.pythonhosted.org/packages/f9/11/ba845c23988798f40e52ba45f34849aa8a1f2d4af4b798588010792ebad6/pyyaml-6.0.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f7057c9a337546edc7973c0d3ba84ddcdf0daa14533c2065749c9075001090e6", size = 753613, upload-time = "2025-09-25T21:32:30.178Z" }, + { url = "https://files.pythonhosted.org/packages/3d/e0/7966e1a7bfc0a45bf0a7fb6b98ea03fc9b8d84fa7f2229e9659680b69ee3/pyyaml-6.0.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:eda16858a3cab07b80edaf74336ece1f986ba330fdb8ee0d6c0d68fe82bc96be", size = 794115, upload-time = "2025-09-25T21:32:31.353Z" }, + { url = "https://files.pythonhosted.org/packages/de/94/980b50a6531b3019e45ddeada0626d45fa85cbe22300844a7983285bed3b/pyyaml-6.0.3-cp313-cp313-win32.whl", hash = "sha256:d0eae10f8159e8fdad514efdc92d74fd8d682c933a6dd088030f3834bc8e6b26", size = 137427, upload-time = "2025-09-25T21:32:32.58Z" }, + { url = "https://files.pythonhosted.org/packages/97/c9/39d5b874e8b28845e4ec2202b5da735d0199dbe5b8fb85f91398814a9a46/pyyaml-6.0.3-cp313-cp313-win_amd64.whl", hash = "sha256:79005a0d97d5ddabfeeea4cf676af11e647e41d81c9a7722a193022accdb6b7c", size = 154090, upload-time = "2025-09-25T21:32:33.659Z" }, + { url = "https://files.pythonhosted.org/packages/73/e8/2bdf3ca2090f68bb3d75b44da7bbc71843b19c9f2b9cb9b0f4ab7a5a4329/pyyaml-6.0.3-cp313-cp313-win_arm64.whl", hash = "sha256:5498cd1645aa724a7c71c8f378eb29ebe23da2fc0d7a08071d89469bf1d2defb", size = 140246, upload-time = "2025-09-25T21:32:34.663Z" }, + { url = 
"https://files.pythonhosted.org/packages/9d/8c/f4bd7f6465179953d3ac9bc44ac1a8a3e6122cf8ada906b4f96c60172d43/pyyaml-6.0.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:8d1fab6bb153a416f9aeb4b8763bc0f22a5586065f86f7664fc23339fc1c1fac", size = 181814, upload-time = "2025-09-25T21:32:35.712Z" }, + { url = "https://files.pythonhosted.org/packages/bd/9c/4d95bb87eb2063d20db7b60faa3840c1b18025517ae857371c4dd55a6b3a/pyyaml-6.0.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:34d5fcd24b8445fadc33f9cf348c1047101756fd760b4dacb5c3e99755703310", size = 173809, upload-time = "2025-09-25T21:32:36.789Z" }, + { url = "https://files.pythonhosted.org/packages/92/b5/47e807c2623074914e29dabd16cbbdd4bf5e9b2db9f8090fa64411fc5382/pyyaml-6.0.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:501a031947e3a9025ed4405a168e6ef5ae3126c59f90ce0cd6f2bfc477be31b7", size = 766454, upload-time = "2025-09-25T21:32:37.966Z" }, + { url = "https://files.pythonhosted.org/packages/02/9e/e5e9b168be58564121efb3de6859c452fccde0ab093d8438905899a3a483/pyyaml-6.0.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:b3bc83488de33889877a0f2543ade9f70c67d66d9ebb4ac959502e12de895788", size = 836355, upload-time = "2025-09-25T21:32:39.178Z" }, + { url = "https://files.pythonhosted.org/packages/88/f9/16491d7ed2a919954993e48aa941b200f38040928474c9e85ea9e64222c3/pyyaml-6.0.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c458b6d084f9b935061bc36216e8a69a7e293a2f1e68bf956dcd9e6cbcd143f5", size = 794175, upload-time = "2025-09-25T21:32:40.865Z" }, + { url = "https://files.pythonhosted.org/packages/dd/3f/5989debef34dc6397317802b527dbbafb2b4760878a53d4166579111411e/pyyaml-6.0.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:7c6610def4f163542a622a73fb39f534f8c101d690126992300bf3207eab9764", size = 755228, upload-time = "2025-09-25T21:32:42.084Z" }, + { url = 
"https://files.pythonhosted.org/packages/d7/ce/af88a49043cd2e265be63d083fc75b27b6ed062f5f9fd6cdc223ad62f03e/pyyaml-6.0.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:5190d403f121660ce8d1d2c1bb2ef1bd05b5f68533fc5c2ea899bd15f4399b35", size = 789194, upload-time = "2025-09-25T21:32:43.362Z" }, + { url = "https://files.pythonhosted.org/packages/23/20/bb6982b26a40bb43951265ba29d4c246ef0ff59c9fdcdf0ed04e0687de4d/pyyaml-6.0.3-cp314-cp314-win_amd64.whl", hash = "sha256:4a2e8cebe2ff6ab7d1050ecd59c25d4c8bd7e6f400f5f82b96557ac0abafd0ac", size = 156429, upload-time = "2025-09-25T21:32:57.844Z" }, + { url = "https://files.pythonhosted.org/packages/f4/f4/a4541072bb9422c8a883ab55255f918fa378ecf083f5b85e87fc2b4eda1b/pyyaml-6.0.3-cp314-cp314-win_arm64.whl", hash = "sha256:93dda82c9c22deb0a405ea4dc5f2d0cda384168e466364dec6255b293923b2f3", size = 143912, upload-time = "2025-09-25T21:32:59.247Z" }, + { url = "https://files.pythonhosted.org/packages/7c/f9/07dd09ae774e4616edf6cda684ee78f97777bdd15847253637a6f052a62f/pyyaml-6.0.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:02893d100e99e03eda1c8fd5c441d8c60103fd175728e23e431db1b589cf5ab3", size = 189108, upload-time = "2025-09-25T21:32:44.377Z" }, + { url = "https://files.pythonhosted.org/packages/4e/78/8d08c9fb7ce09ad8c38ad533c1191cf27f7ae1effe5bb9400a46d9437fcf/pyyaml-6.0.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:c1ff362665ae507275af2853520967820d9124984e0f7466736aea23d8611fba", size = 183641, upload-time = "2025-09-25T21:32:45.407Z" }, + { url = "https://files.pythonhosted.org/packages/7b/5b/3babb19104a46945cf816d047db2788bcaf8c94527a805610b0289a01c6b/pyyaml-6.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6adc77889b628398debc7b65c073bcb99c4a0237b248cacaf3fe8a557563ef6c", size = 831901, upload-time = "2025-09-25T21:32:48.83Z" }, + { url = 
"https://files.pythonhosted.org/packages/8b/cc/dff0684d8dc44da4d22a13f35f073d558c268780ce3c6ba1b87055bb0b87/pyyaml-6.0.3-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a80cb027f6b349846a3bf6d73b5e95e782175e52f22108cfa17876aaeff93702", size = 861132, upload-time = "2025-09-25T21:32:50.149Z" }, + { url = "https://files.pythonhosted.org/packages/b1/5e/f77dc6b9036943e285ba76b49e118d9ea929885becb0a29ba8a7c75e29fe/pyyaml-6.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:00c4bdeba853cc34e7dd471f16b4114f4162dc03e6b7afcc2128711f0eca823c", size = 839261, upload-time = "2025-09-25T21:32:51.808Z" }, + { url = "https://files.pythonhosted.org/packages/ce/88/a9db1376aa2a228197c58b37302f284b5617f56a5d959fd1763fb1675ce6/pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:66e1674c3ef6f541c35191caae2d429b967b99e02040f5ba928632d9a7f0f065", size = 805272, upload-time = "2025-09-25T21:32:52.941Z" }, + { url = "https://files.pythonhosted.org/packages/da/92/1446574745d74df0c92e6aa4a7b0b3130706a4142b2d1a5869f2eaa423c6/pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:16249ee61e95f858e83976573de0f5b2893b3677ba71c9dd36b9cf8be9ac6d65", size = 829923, upload-time = "2025-09-25T21:32:54.537Z" }, + { url = "https://files.pythonhosted.org/packages/f0/7a/1c7270340330e575b92f397352af856a8c06f230aa3e76f86b39d01b416a/pyyaml-6.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:4ad1906908f2f5ae4e5a8ddfce73c320c2a1429ec52eafd27138b7f1cbe341c9", size = 174062, upload-time = "2025-09-25T21:32:55.767Z" }, + { url = "https://files.pythonhosted.org/packages/f1/12/de94a39c2ef588c7e6455cfbe7343d3b2dc9d6b6b2f40c4c6565744c873d/pyyaml-6.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:ebc55a14a21cb14062aa4162f906cd962b28e2e9ea38f9b4391244cd8de4ae0b", size = 149341, upload-time = "2025-09-25T21:32:56.828Z" }, +] + +[[package]] +name = "requests" +version = "2.32.5" +source = { 
registry = "https://pypi.org/simple" } +dependencies = [ + { name = "certifi" }, + { name = "charset-normalizer" }, + { name = "idna" }, + { name = "urllib3" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/c9/74/b3ff8e6c8446842c3f5c837e9c3dfcfe2018ea6ecef224c710c85ef728f4/requests-2.32.5.tar.gz", hash = "sha256:dbba0bac56e100853db0ea71b82b4dfd5fe2bf6d3754a8893c3af500cec7d7cf", size = 134517, upload-time = "2025-08-18T20:46:02.573Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1e/db/4254e3eabe8020b458f1a747140d32277ec7a271daf1d235b70dc0b4e6e3/requests-2.32.5-py3-none-any.whl", hash = "sha256:2462f94637a34fd532264295e186976db0f5d453d1cdd31473c85a6a161affb6", size = 64738, upload-time = "2025-08-18T20:46:00.542Z" }, +] + +[[package]] +name = "rich" +version = "14.1.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "markdown-it-py" }, + { name = "pygments" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/fe/75/af448d8e52bf1d8fa6a9d089ca6c07ff4453d86c65c145d0a300bb073b9b/rich-14.1.0.tar.gz", hash = "sha256:e497a48b844b0320d45007cdebfeaeed8db2a4f4bcf49f15e455cfc4af11eaa8", size = 224441, upload-time = "2025-07-25T07:32:58.125Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e3/30/3c4d035596d3cf444529e0b2953ad0466f6049528a879d27534700580395/rich-14.1.0-py3-none-any.whl", hash = "sha256:536f5f1785986d6dbdea3c75205c473f970777b4a0d6c6dd1b696aa05a3fa04f", size = 243368, upload-time = "2025-07-25T07:32:56.73Z" }, +] + +[[package]] +name = "ruff" +version = "0.13.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/02/df/8d7d8c515d33adfc540e2edf6c6021ea1c5a58a678d8cfce9fae59aabcab/ruff-0.13.2.tar.gz", hash = "sha256:cb12fffd32fb16d32cef4ed16d8c7cdc27ed7c944eaa98d99d01ab7ab0b710ff", size = 5416417, upload-time = "2025-09-25T14:54:09.936Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/6e/84/5716a7fa4758e41bf70e603e13637c42cfb9dbf7ceb07180211b9bbf75ef/ruff-0.13.2-py3-none-linux_armv6l.whl", hash = "sha256:3796345842b55f033a78285e4f1641078f902020d8450cade03aad01bffd81c3", size = 12343254, upload-time = "2025-09-25T14:53:27.784Z" }, + { url = "https://files.pythonhosted.org/packages/9b/77/c7042582401bb9ac8eff25360e9335e901d7a1c0749a2b28ba4ecb239991/ruff-0.13.2-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:ff7e4dda12e683e9709ac89e2dd436abf31a4d8a8fc3d89656231ed808e231d2", size = 13040891, upload-time = "2025-09-25T14:53:31.38Z" }, + { url = "https://files.pythonhosted.org/packages/c6/15/125a7f76eb295cb34d19c6778e3a82ace33730ad4e6f28d3427e134a02e0/ruff-0.13.2-py3-none-macosx_11_0_arm64.whl", hash = "sha256:c75e9d2a2fafd1fdd895d0e7e24b44355984affdde1c412a6f6d3f6e16b22d46", size = 12243588, upload-time = "2025-09-25T14:53:33.543Z" }, + { url = "https://files.pythonhosted.org/packages/9e/eb/0093ae04a70f81f8be7fd7ed6456e926b65d238fc122311293d033fdf91e/ruff-0.13.2-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cceac74e7bbc53ed7d15d1042ffe7b6577bf294611ad90393bf9b2a0f0ec7cb6", size = 12491359, upload-time = "2025-09-25T14:53:35.892Z" }, + { url = "https://files.pythonhosted.org/packages/43/fe/72b525948a6956f07dad4a6f122336b6a05f2e3fd27471cea612349fedb9/ruff-0.13.2-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6ae3f469b5465ba6d9721383ae9d49310c19b452a161b57507764d7ef15f4b07", size = 12162486, upload-time = "2025-09-25T14:53:38.171Z" }, + { url = "https://files.pythonhosted.org/packages/6a/e3/0fac422bbbfb2ea838023e0d9fcf1f30183d83ab2482800e2cb892d02dfe/ruff-0.13.2-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f8f9e3cd6714358238cd6626b9d43026ed19c0c018376ac1ef3c3a04ffb42d8", size = 13871203, upload-time = "2025-09-25T14:53:41.943Z" }, + { url = 
"https://files.pythonhosted.org/packages/6b/82/b721c8e3ec5df6d83ba0e45dcf00892c4f98b325256c42c38ef136496cbf/ruff-0.13.2-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:c6ed79584a8f6cbe2e5d7dbacf7cc1ee29cbdb5df1172e77fbdadc8bb85a1f89", size = 14929635, upload-time = "2025-09-25T14:53:43.953Z" }, + { url = "https://files.pythonhosted.org/packages/c4/a0/ad56faf6daa507b83079a1ad7a11694b87d61e6bf01c66bd82b466f21821/ruff-0.13.2-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aed130b2fde049cea2019f55deb939103123cdd191105f97a0599a3e753d61b0", size = 14338783, upload-time = "2025-09-25T14:53:46.205Z" }, + { url = "https://files.pythonhosted.org/packages/47/77/ad1d9156db8f99cd01ee7e29d74b34050e8075a8438e589121fcd25c4b08/ruff-0.13.2-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1887c230c2c9d65ed1b4e4cfe4d255577ea28b718ae226c348ae68df958191aa", size = 13355322, upload-time = "2025-09-25T14:53:48.164Z" }, + { url = "https://files.pythonhosted.org/packages/64/8b/e87cfca2be6f8b9f41f0bb12dc48c6455e2d66df46fe61bb441a226f1089/ruff-0.13.2-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5bcb10276b69b3cfea3a102ca119ffe5c6ba3901e20e60cf9efb53fa417633c3", size = 13354427, upload-time = "2025-09-25T14:53:50.486Z" }, + { url = "https://files.pythonhosted.org/packages/7f/df/bf382f3fbead082a575edb860897287f42b1b3c694bafa16bc9904c11ed3/ruff-0.13.2-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:afa721017aa55a555b2ff7944816587f1cb813c2c0a882d158f59b832da1660d", size = 13537637, upload-time = "2025-09-25T14:53:52.887Z" }, + { url = "https://files.pythonhosted.org/packages/51/70/1fb7a7c8a6fc8bd15636288a46e209e81913b87988f26e1913d0851e54f4/ruff-0.13.2-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:1dbc875cf3720c64b3990fef8939334e74cb0ca65b8dbc61d1f439201a38101b", size = 12340025, upload-time = "2025-09-25T14:53:54.88Z" }, + { url = 
"https://files.pythonhosted.org/packages/4c/27/1e5b3f1c23ca5dd4106d9d580e5c13d9acb70288bff614b3d7b638378cc9/ruff-0.13.2-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:5b939a1b2a960e9742e9a347e5bbc9b3c3d2c716f86c6ae273d9cbd64f193f22", size = 12133449, upload-time = "2025-09-25T14:53:57.089Z" }, + { url = "https://files.pythonhosted.org/packages/2d/09/b92a5ccee289f11ab128df57d5911224197d8d55ef3bd2043534ff72ca54/ruff-0.13.2-py3-none-musllinux_1_2_i686.whl", hash = "sha256:50e2d52acb8de3804fc5f6e2fa3ae9bdc6812410a9e46837e673ad1f90a18736", size = 13051369, upload-time = "2025-09-25T14:53:59.124Z" }, + { url = "https://files.pythonhosted.org/packages/89/99/26c9d1c7d8150f45e346dc045cc49f23e961efceb4a70c47dea0960dea9a/ruff-0.13.2-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:3196bc13ab2110c176b9a4ae5ff7ab676faaa1964b330a1383ba20e1e19645f2", size = 13523644, upload-time = "2025-09-25T14:54:01.622Z" }, + { url = "https://files.pythonhosted.org/packages/f7/00/e7f1501e81e8ec290e79527827af1d88f541d8d26151751b46108978dade/ruff-0.13.2-py3-none-win32.whl", hash = "sha256:7c2a0b7c1e87795fec3404a485096bcd790216c7c146a922d121d8b9c8f1aaac", size = 12245990, upload-time = "2025-09-25T14:54:03.647Z" }, + { url = "https://files.pythonhosted.org/packages/ee/bd/d9f33a73de84fafd0146c6fba4f497c4565fe8fa8b46874b8e438869abc2/ruff-0.13.2-py3-none-win_amd64.whl", hash = "sha256:17d95fb32218357c89355f6f6f9a804133e404fc1f65694372e02a557edf8585", size = 13324004, upload-time = "2025-09-25T14:54:06.05Z" }, + { url = "https://files.pythonhosted.org/packages/c3/12/28fa2f597a605884deb0f65c1b1ae05111051b2a7030f5d8a4ff7f4599ba/ruff-0.13.2-py3-none-win_arm64.whl", hash = "sha256:da711b14c530412c827219312b7d7fbb4877fb31150083add7e8c5336549cea7", size = 12484437, upload-time = "2025-09-25T14:54:08.022Z" }, +] + +[[package]] +name = "sniffio" +version = "1.3.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/a2/87/a6771e1546d97e7e041b6ae58d80074f81b7d5121207425c964ddf5cfdbd/sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc", size = 20372, upload-time = "2024-02-25T23:20:04.057Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 10235, upload-time = "2024-02-25T23:20:01.196Z" }, +] + +[[package]] +name = "sortedcontainers" +version = "2.4.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e8/c4/ba2f8066cceb6f23394729afe52f3bf7adec04bf9ed2c820b39e19299111/sortedcontainers-2.4.0.tar.gz", hash = "sha256:25caa5a06cc30b6b83d11423433f65d1f9d76c4c6a0c90e3379eaa43b9bfdb88", size = 30594, upload-time = "2021-05-16T22:03:42.897Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/32/46/9cb0e58b2deb7f82b84065f37f3bffeb12413f947f9388e4cac22c4621ce/sortedcontainers-2.4.0-py2.py3-none-any.whl", hash = "sha256:a163dcaede0f1c021485e957a39245190e74249897e2ae4b2aa38595db237ee0", size = 29575, upload-time = "2021-05-16T22:03:41.177Z" }, +] + +[[package]] +name = "toml" +version = "0.10.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/be/ba/1f744cdc819428fc6b5084ec34d9b30660f6f9daaf70eead706e3203ec3c/toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f", size = 22253, upload-time = "2020-11-01T01:40:22.204Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/44/6f/7120676b6d73228c96e17f1f794d8ab046fc910d781c8d151120c3f1569e/toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b", size = 16588, upload-time = "2020-11-01T01:40:20.672Z" }, +] + 
+[[package]] +name = "tomli" +version = "2.2.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/18/87/302344fed471e44a87289cf4967697d07e532f2421fdaf868a303cbae4ff/tomli-2.2.1.tar.gz", hash = "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff", size = 17175, upload-time = "2024-11-27T22:38:36.873Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/43/ca/75707e6efa2b37c77dadb324ae7d9571cb424e61ea73fad7c56c2d14527f/tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249", size = 131077, upload-time = "2024-11-27T22:37:54.956Z" }, + { url = "https://files.pythonhosted.org/packages/c7/16/51ae563a8615d472fdbffc43a3f3d46588c264ac4f024f63f01283becfbb/tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6", size = 123429, upload-time = "2024-11-27T22:37:56.698Z" }, + { url = "https://files.pythonhosted.org/packages/f1/dd/4f6cd1e7b160041db83c694abc78e100473c15d54620083dbd5aae7b990e/tomli-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a", size = 226067, upload-time = "2024-11-27T22:37:57.63Z" }, + { url = "https://files.pythonhosted.org/packages/a9/6b/c54ede5dc70d648cc6361eaf429304b02f2871a345bbdd51e993d6cdf550/tomli-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee", size = 236030, upload-time = "2024-11-27T22:37:59.344Z" }, + { url = "https://files.pythonhosted.org/packages/1f/47/999514fa49cfaf7a92c805a86c3c43f4215621855d151b61c602abb38091/tomli-2.2.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e", size = 240898, 
upload-time = "2024-11-27T22:38:00.429Z" }, + { url = "https://files.pythonhosted.org/packages/73/41/0a01279a7ae09ee1573b423318e7934674ce06eb33f50936655071d81a24/tomli-2.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4", size = 229894, upload-time = "2024-11-27T22:38:02.094Z" }, + { url = "https://files.pythonhosted.org/packages/55/18/5d8bc5b0a0362311ce4d18830a5d28943667599a60d20118074ea1b01bb7/tomli-2.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106", size = 245319, upload-time = "2024-11-27T22:38:03.206Z" }, + { url = "https://files.pythonhosted.org/packages/92/a3/7ade0576d17f3cdf5ff44d61390d4b3febb8a9fc2b480c75c47ea048c646/tomli-2.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8", size = 238273, upload-time = "2024-11-27T22:38:04.217Z" }, + { url = "https://files.pythonhosted.org/packages/72/6f/fa64ef058ac1446a1e51110c375339b3ec6be245af9d14c87c4a6412dd32/tomli-2.2.1-cp311-cp311-win32.whl", hash = "sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff", size = 98310, upload-time = "2024-11-27T22:38:05.908Z" }, + { url = "https://files.pythonhosted.org/packages/6a/1c/4a2dcde4a51b81be3530565e92eda625d94dafb46dbeb15069df4caffc34/tomli-2.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b", size = 108309, upload-time = "2024-11-27T22:38:06.812Z" }, + { url = "https://files.pythonhosted.org/packages/52/e1/f8af4c2fcde17500422858155aeb0d7e93477a0d59a98e56cbfe75070fd0/tomli-2.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea", size = 132762, upload-time = "2024-11-27T22:38:07.731Z" }, + { url = 
"https://files.pythonhosted.org/packages/03/b8/152c68bb84fc00396b83e7bbddd5ec0bd3dd409db4195e2a9b3e398ad2e3/tomli-2.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8", size = 123453, upload-time = "2024-11-27T22:38:09.384Z" }, + { url = "https://files.pythonhosted.org/packages/c8/d6/fc9267af9166f79ac528ff7e8c55c8181ded34eb4b0e93daa767b8841573/tomli-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192", size = 233486, upload-time = "2024-11-27T22:38:10.329Z" }, + { url = "https://files.pythonhosted.org/packages/5c/51/51c3f2884d7bab89af25f678447ea7d297b53b5a3b5730a7cb2ef6069f07/tomli-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222", size = 242349, upload-time = "2024-11-27T22:38:11.443Z" }, + { url = "https://files.pythonhosted.org/packages/ab/df/bfa89627d13a5cc22402e441e8a931ef2108403db390ff3345c05253935e/tomli-2.2.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77", size = 252159, upload-time = "2024-11-27T22:38:13.099Z" }, + { url = "https://files.pythonhosted.org/packages/9e/6e/fa2b916dced65763a5168c6ccb91066f7639bdc88b48adda990db10c8c0b/tomli-2.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6", size = 237243, upload-time = "2024-11-27T22:38:14.766Z" }, + { url = "https://files.pythonhosted.org/packages/b4/04/885d3b1f650e1153cbb93a6a9782c58a972b94ea4483ae4ac5cedd5e4a09/tomli-2.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd", size = 259645, upload-time = "2024-11-27T22:38:15.843Z" }, + { url = 
"https://files.pythonhosted.org/packages/9c/de/6b432d66e986e501586da298e28ebeefd3edc2c780f3ad73d22566034239/tomli-2.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e", size = 244584, upload-time = "2024-11-27T22:38:17.645Z" }, + { url = "https://files.pythonhosted.org/packages/1c/9a/47c0449b98e6e7d1be6cbac02f93dd79003234ddc4aaab6ba07a9a7482e2/tomli-2.2.1-cp312-cp312-win32.whl", hash = "sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98", size = 98875, upload-time = "2024-11-27T22:38:19.159Z" }, + { url = "https://files.pythonhosted.org/packages/ef/60/9b9638f081c6f1261e2688bd487625cd1e660d0a85bd469e91d8db969734/tomli-2.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4", size = 109418, upload-time = "2024-11-27T22:38:20.064Z" }, + { url = "https://files.pythonhosted.org/packages/04/90/2ee5f2e0362cb8a0b6499dc44f4d7d48f8fff06d28ba46e6f1eaa61a1388/tomli-2.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7", size = 132708, upload-time = "2024-11-27T22:38:21.659Z" }, + { url = "https://files.pythonhosted.org/packages/c0/ec/46b4108816de6b385141f082ba99e315501ccd0a2ea23db4a100dd3990ea/tomli-2.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c", size = 123582, upload-time = "2024-11-27T22:38:22.693Z" }, + { url = "https://files.pythonhosted.org/packages/a0/bd/b470466d0137b37b68d24556c38a0cc819e8febe392d5b199dcd7f578365/tomli-2.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13", size = 232543, upload-time = "2024-11-27T22:38:24.367Z" }, + { url = 
"https://files.pythonhosted.org/packages/d9/e5/82e80ff3b751373f7cead2815bcbe2d51c895b3c990686741a8e56ec42ab/tomli-2.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281", size = 241691, upload-time = "2024-11-27T22:38:26.081Z" }, + { url = "https://files.pythonhosted.org/packages/05/7e/2a110bc2713557d6a1bfb06af23dd01e7dde52b6ee7dadc589868f9abfac/tomli-2.2.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272", size = 251170, upload-time = "2024-11-27T22:38:27.921Z" }, + { url = "https://files.pythonhosted.org/packages/64/7b/22d713946efe00e0adbcdfd6d1aa119ae03fd0b60ebed51ebb3fa9f5a2e5/tomli-2.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140", size = 236530, upload-time = "2024-11-27T22:38:29.591Z" }, + { url = "https://files.pythonhosted.org/packages/38/31/3a76f67da4b0cf37b742ca76beaf819dca0ebef26d78fc794a576e08accf/tomli-2.2.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2", size = 258666, upload-time = "2024-11-27T22:38:30.639Z" }, + { url = "https://files.pythonhosted.org/packages/07/10/5af1293da642aded87e8a988753945d0cf7e00a9452d3911dd3bb354c9e2/tomli-2.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744", size = 243954, upload-time = "2024-11-27T22:38:31.702Z" }, + { url = "https://files.pythonhosted.org/packages/5b/b9/1ed31d167be802da0fc95020d04cd27b7d7065cc6fbefdd2f9186f60d7bd/tomli-2.2.1-cp313-cp313-win32.whl", hash = "sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec", size = 98724, upload-time = "2024-11-27T22:38:32.837Z" }, + { url = 
"https://files.pythonhosted.org/packages/c7/32/b0963458706accd9afcfeb867c0f9175a741bf7b19cd424230714d722198/tomli-2.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69", size = 109383, upload-time = "2024-11-27T22:38:34.455Z" }, + { url = "https://files.pythonhosted.org/packages/6e/c2/61d3e0f47e2b74ef40a68b9e6ad5984f6241a942f7cd3bbfbdbd03861ea9/tomli-2.2.1-py3-none-any.whl", hash = "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc", size = 14257, upload-time = "2024-11-27T22:38:35.385Z" }, +] + +[[package]] +name = "typing-extensions" +version = "4.15.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/72/94/1a15dd82efb362ac84269196e94cf00f187f7ed21c242792a923cdb1c61f/typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466", size = 109391, upload-time = "2025-08-25T13:49:26.313Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548", size = 44614, upload-time = "2025-08-25T13:49:24.86Z" }, +] + +[[package]] +name = "typing-inspection" +version = "0.4.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/55/e3/70399cb7dd41c10ac53367ae42139cf4b1ca5f36bb3dc6c9d33acdb43655/typing_inspection-0.4.2.tar.gz", hash = "sha256:ba561c48a67c5958007083d386c3295464928b01faa735ab8547c5692e87f464", size = 75949, upload-time = "2025-10-01T02:14:41.687Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/dc/9b/47798a6c91d8bdb567fe2698fe81e0c6b7cb7ef4d13da4114b41d239f65d/typing_inspection-0.4.2-py3-none-any.whl", hash = 
"sha256:4ed1cacbdc298c220f1bd249ed5287caa16f34d44ef4e9c3d0cbad5b521545e7", size = 14611, upload-time = "2025-10-01T02:14:40.154Z" }, +] + +[[package]] +name = "urllib3" +version = "2.5.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/15/22/9ee70a2574a4f4599c47dd506532914ce044817c7752a79b6a51286319bc/urllib3-2.5.0.tar.gz", hash = "sha256:3fc47733c7e419d4bc3f6b3dc2b4f890bb743906a30d56ba4a5bfa4bbff92760", size = 393185, upload-time = "2025-06-18T14:07:41.644Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a7/c2/fe1e52489ae3122415c51f387e221dd0773709bad6c6cdaa599e8a2c5185/urllib3-2.5.0-py3-none-any.whl", hash = "sha256:e6b01673c0fa6a13e374b50871808eb3bf7046c4b125b216f6bf1cc604cff0dc", size = 129795, upload-time = "2025-06-18T14:07:40.39Z" }, +] + +[[package]] +name = "virtualenv" +version = "20.34.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "distlib" }, + { name = "filelock" }, + { name = "platformdirs" }, + { name = "typing-extensions", marker = "python_full_version < '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/1c/14/37fcdba2808a6c615681cd216fecae00413c9dab44fb2e57805ecf3eaee3/virtualenv-20.34.0.tar.gz", hash = "sha256:44815b2c9dee7ed86e387b842a84f20b93f7f417f95886ca1996a72a4138eb1a", size = 6003808, upload-time = "2025-08-13T14:24:07.464Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/76/06/04c8e804f813cf972e3262f3f8584c232de64f0cde9f703b46cf53a45090/virtualenv-20.34.0-py3-none-any.whl", hash = "sha256:341f5afa7eee943e4984a9207c025feedd768baff6753cd660c857ceb3e36026", size = 5983279, upload-time = "2025-08-13T14:24:05.111Z" }, +]