diff --git a/.devcontainer/Dockerfile b/.devcontainer/Dockerfile new file mode 100644 index 0000000..ff261ba --- /dev/null +++ b/.devcontainer/Dockerfile @@ -0,0 +1,9 @@ +ARG VARIANT="3.9" +FROM mcr.microsoft.com/vscode/devcontainers/python:0-${VARIANT} + +USER vscode + +RUN curl -sSf https://rye.astral.sh/get | RYE_VERSION="0.44.0" RYE_INSTALL_OPTION="--yes" bash +ENV PATH=/home/vscode/.rye/shims:$PATH + +RUN echo "[[ -d .venv ]] && source .venv/bin/activate || export PATH=\$PATH" >> /home/vscode/.bashrc diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json index 43fd5a7..c17fdc1 100644 --- a/.devcontainer/devcontainer.json +++ b/.devcontainer/devcontainer.json @@ -1,15 +1,43 @@ // For format details, see https://aka.ms/devcontainer.json. For config options, see the // README at: https://github.com/devcontainers/templates/tree/main/src/debian { - "name": "Development", - "image": "mcr.microsoft.com/devcontainers/typescript-node:latest", - "features": { - "ghcr.io/devcontainers/features/node:1": {} + "name": "Debian", + "build": { + "dockerfile": "Dockerfile", + "context": ".." }, - "postCreateCommand": "yarn install", + + "postStartCommand": "rye sync --all-features", + "customizations": { "vscode": { - "extensions": ["esbenp.prettier-vscode"] + "extensions": [ + "ms-python.python" + ], + "settings": { + "terminal.integrated.shell.linux": "/bin/bash", + "python.pythonPath": ".venv/bin/python", + "python.defaultInterpreterPath": ".venv/bin/python", + "python.typeChecking": "basic", + "terminal.integrated.env.linux": { + "PATH": "/home/vscode/.rye/shims:${env:PATH}" + } + } } + }, + "features": { + "ghcr.io/devcontainers/features/node:1": {} } + + // Features to add to the dev container. More info: https://containers.dev/features. + // "features": {}, + + // Use 'forwardPorts' to make a list of ports inside the container available locally. + // "forwardPorts": [], + + // Configure tool-specific properties. + // "customizations": {}, + + // Uncomment to connect as root instead. More info: https://aka.ms/dev-containers-non-root. 
+ // "remoteUser": "root" } diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index fc88adb..78bafd2 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -16,70 +16,80 @@ jobs: lint: timeout-minutes: 10 name: lint - runs-on: ${{ github.repository == 'stainless-sdks/brapi-typescript' && 'depot-ubuntu-24.04' || 'ubuntu-latest' }} + runs-on: ${{ github.repository == 'stainless-sdks/brapi-python' && 'depot-ubuntu-24.04' || 'ubuntu-latest' }} if: github.event_name == 'push' || github.event.pull_request.head.repo.fork steps: - uses: actions/checkout@v4 - - name: Set up Node - uses: actions/setup-node@v4 - with: - node-version: '20' + - name: Install Rye + run: | + curl -sSf https://rye.astral.sh/get | bash + echo "$HOME/.rye/shims" >> $GITHUB_PATH + env: + RYE_VERSION: '0.44.0' + RYE_INSTALL_OPTION: '--yes' - - name: Bootstrap - run: ./scripts/bootstrap + - name: Install dependencies + run: rye sync --all-features - - name: Check types + - name: Run lints run: ./scripts/lint build: - timeout-minutes: 5 - name: build - runs-on: ${{ github.repository == 'stainless-sdks/brapi-typescript' && 'depot-ubuntu-24.04' || 'ubuntu-latest' }} if: github.event_name == 'push' || github.event.pull_request.head.repo.fork + timeout-minutes: 10 + name: build permissions: contents: read id-token: write + runs-on: ${{ github.repository == 'stainless-sdks/brapi-python' && 'depot-ubuntu-24.04' || 'ubuntu-latest' }} steps: - uses: actions/checkout@v4 - - name: Set up Node - uses: actions/setup-node@v4 - with: - node-version: '20' + - name: Install Rye + run: | + curl -sSf https://rye.astral.sh/get | bash + echo "$HOME/.rye/shims" >> $GITHUB_PATH + env: + RYE_VERSION: '0.44.0' + RYE_INSTALL_OPTION: '--yes' - - name: Bootstrap - run: ./scripts/bootstrap + - name: Install dependencies + run: rye sync --all-features - - name: Check build - run: ./scripts/build + - name: Run build + run: rye build - name: Get GitHub OIDC Token - if: github.repository == 'stainless-sdks/brapi-typescript' + if: github.repository == 'stainless-sdks/brapi-python' id: github-oidc uses: actions/github-script@v6 with: script: core.setOutput('github_token', await core.getIDToken()); - name: Upload tarball - if: github.repository == 'stainless-sdks/brapi-typescript' + if: github.repository == 'stainless-sdks/brapi-python' env: URL: https://pkg.stainless.com/s AUTH: ${{ steps.github-oidc.outputs.github_token }} SHA: ${{ github.sha }} run: ./scripts/utils/upload-artifact.sh + test: timeout-minutes: 10 name: test - runs-on: ${{ github.repository == 'stainless-sdks/brapi-typescript' && 'depot-ubuntu-24.04' || 'ubuntu-latest' }} + runs-on: ${{ github.repository == 'stainless-sdks/brapi-python' && 'depot-ubuntu-24.04' || 'ubuntu-latest' }} if: github.event_name == 'push' || github.event.pull_request.head.repo.fork steps: - uses: actions/checkout@v4 - - name: Set up Node - uses: actions/setup-node@v4 - with: - node-version: '20' + - name: Install Rye + run: | + curl -sSf https://rye.astral.sh/get | bash + echo "$HOME/.rye/shims" >> $GITHUB_PATH + env: + RYE_VERSION: '0.44.0' + RYE_INSTALL_OPTION: '--yes' - name: Bootstrap run: ./scripts/bootstrap diff --git a/.github/workflows/publish-npm.yml b/.github/workflows/publish-npm.yml deleted file mode 100644 index 025fd66..0000000 --- a/.github/workflows/publish-npm.yml +++ /dev/null @@ -1,32 +0,0 @@ -# This workflow is triggered when a GitHub release is created. -# It can also be run manually to re-publish to NPM in case it failed for some reason. 
-# You can run this workflow by navigating to https://www.github.com/brapi-dev/brapi-typescript/actions/workflows/publish-npm.yml -name: Publish NPM -on: - workflow_dispatch: - - release: - types: [published] - -jobs: - publish: - name: publish - runs-on: ubuntu-latest - - steps: - - uses: actions/checkout@v4 - - - name: Set up Node - uses: actions/setup-node@v3 - with: - node-version: '20' - - - name: Install dependencies - run: | - yarn install - - - name: Publish to NPM - run: | - bash ./bin/publish-npm - env: - NPM_TOKEN: ${{ secrets.BRAPI_NPM_TOKEN || secrets.NPM_TOKEN }} diff --git a/.github/workflows/publish-pypi.yml b/.github/workflows/publish-pypi.yml new file mode 100644 index 0000000..8181d42 --- /dev/null +++ b/.github/workflows/publish-pypi.yml @@ -0,0 +1,31 @@ +# This workflow is triggered when a GitHub release is created. +# It can also be run manually to re-publish to PyPI in case it failed for some reason. +# You can run this workflow by navigating to https://www.github.com/brapi-dev/brapi-python/actions/workflows/publish-pypi.yml +name: Publish PyPI +on: + workflow_dispatch: + + release: + types: [published] + +jobs: + publish: + name: publish + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v4 + + - name: Install Rye + run: | + curl -sSf https://rye.astral.sh/get | bash + echo "$HOME/.rye/shims" >> $GITHUB_PATH + env: + RYE_VERSION: '0.44.0' + RYE_INSTALL_OPTION: '--yes' + + - name: Publish to PyPI + run: | + bash ./bin/publish-pypi + env: + PYPI_TOKEN: ${{ secrets.BRAPI_PYPI_TOKEN || secrets.PYPI_TOKEN }} diff --git a/.github/workflows/release-doctor.yml b/.github/workflows/release-doctor.yml index 4c01291..790ebde 100644 --- a/.github/workflows/release-doctor.yml +++ b/.github/workflows/release-doctor.yml @@ -9,7 +9,7 @@ jobs: release_doctor: name: release doctor runs-on: ubuntu-latest - if: github.repository == 'brapi-dev/brapi-typescript' && (github.event_name == 'push' || github.event_name == 'workflow_dispatch' || startsWith(github.head_ref, 'release-please') || github.head_ref == 'next') + if: github.repository == 'brapi-dev/brapi-python' && (github.event_name == 'push' || github.event_name == 'workflow_dispatch' || startsWith(github.head_ref, 'release-please') || github.head_ref == 'next') steps: - uses: actions/checkout@v4 @@ -18,5 +18,4 @@ jobs: run: | bash ./bin/check-release-environment env: - NPM_TOKEN: ${{ secrets.BRAPI_NPM_TOKEN || secrets.NPM_TOKEN }} - + PYPI_TOKEN: ${{ secrets.BRAPI_PYPI_TOKEN || secrets.PYPI_TOKEN }} diff --git a/.gitignore b/.gitignore index 2412bb7..95ceb18 100644 --- a/.gitignore +++ b/.gitignore @@ -1,11 +1,15 @@ .prism.log -node_modules -yarn-error.log -codegen.log -Brewfile.lock.json +_dev + +__pycache__ +.mypy_cache + dist -dist-deno -/*.tgz -.idea/ -.eslintcache +.venv +.idea + +.env +.envrc +codegen.log +Brewfile.lock.json diff --git a/.prettierignore b/.prettierignore deleted file mode 100644 index 3548c5a..0000000 --- a/.prettierignore +++ /dev/null @@ -1,7 +0,0 @@ -CHANGELOG.md -/ecosystem-tests/*/** -/node_modules -/deno - -# don't format tsc output, will break source maps -/dist diff --git a/.prettierrc.json b/.prettierrc.json deleted file mode 100644 index af75ada..0000000 --- a/.prettierrc.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "arrowParens": "always", - "experimentalTernaries": true, - "printWidth": 110, - "singleQuote": true, - "trailingComma": "all" -} diff --git a/.python-version b/.python-version new file mode 100644 index 0000000..43077b2 --- /dev/null +++ b/.python-version @@ -0,0 +1 @@ +3.9.18 
diff --git a/.release-please-manifest.json b/.release-please-manifest.json index 37fcefa..fea3454 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -1,3 +1,3 @@ { ".": "1.0.0" -} +} \ No newline at end of file diff --git a/.stats.yml b/.stats.yml index 920044c..00e0370 100644 --- a/.stats.yml +++ b/.stats.yml @@ -1,4 +1,4 @@ configured_endpoints: 11 openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/alisson%2Fbrapi-bf7b0065e4057ae80522a943caa4967f1fe0aa0a6989122f5687788f39dfbdea.yml openapi_spec_hash: 7ac81061bb9f3cb0c180b82b5ea83258 -config_hash: 6487be1d01cb24761a4e2e60819d9f5a +config_hash: 6f10a67950f65bf850612b59838ad03b diff --git a/.vscode/settings.json b/.vscode/settings.json new file mode 100644 index 0000000..5b01030 --- /dev/null +++ b/.vscode/settings.json @@ -0,0 +1,3 @@ +{ + "python.analysis.importFormat": "relative", +} diff --git a/Brewfile b/Brewfile index e4feee6..492ca37 100644 --- a/Brewfile +++ b/Brewfile @@ -1 +1,2 @@ -brew "node" +brew "rye" + diff --git a/CHANGELOG.md b/CHANGELOG.md index 27ebd57..988f730 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,9 +2,12 @@ ## 1.0.0 (2025-10-12) -Full Changelog: [v0.0.1...v1.0.0](https://github.com/brapi-dev/brapi-typescript/compare/v0.0.1...v1.0.0) +Full Changelog: [v0.0.1...v1.0.0](https://github.com/brapi-dev/brapi-python/compare/v0.0.1...v1.0.0) ### Chores -* update SDK settings ([551afe3](https://github.com/brapi-dev/brapi-typescript/commit/551afe39d139c11f6e6ace580ed8faa06217a073)) -* update SDK settings ([14efeb5](https://github.com/brapi-dev/brapi-typescript/commit/14efeb558642ad68760ce354f10d6f37a1df7f68)) +* sync repo ([a54d73b](https://github.com/brapi-dev/brapi-python/commit/a54d73b6e3d9e6f0347ff61d73885cf6cadb4c56)) +* update SDK settings ([5313c1b](https://github.com/brapi-dev/brapi-python/commit/5313c1bccd7d4366b768ec14fa6a22cb0ebbefd0)) +* update SDK settings ([604a534](https://github.com/brapi-dev/brapi-python/commit/604a534b7fc3daf6e9128a98e74aca9295a02c70)) +* update SDK settings ([551afe3](https://github.com/brapi-dev/brapi-python/commit/551afe39d139c11f6e6ace580ed8faa06217a073)) +* update SDK settings ([14efeb5](https://github.com/brapi-dev/brapi-python/commit/14efeb558642ad68760ce354f10d6f37a1df7f68)) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 131f6da..5068a3b 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -1,38 +1,58 @@ ## Setting up the environment -This repository uses [`yarn@v1`](https://classic.yarnpkg.com/lang/en/docs/install). -Other package managers may work but are not officially supported for development. +### With Rye -To set up the repository, run: +We use [Rye](https://rye.astral.sh/) to manage dependencies because it will automatically provision a Python environment with the expected Python version. To set it up, run: ```sh -$ yarn -$ yarn build +$ ./scripts/bootstrap ``` -This will install all the required dependencies and build output files to `dist/`. 
+Or [install Rye manually](https://rye.astral.sh/guide/installation/) and run: + +```sh +$ rye sync --all-features +``` + +You can then run scripts using `rye run python script.py` or by activating the virtual environment: + +```sh +# Activate the virtual environment - https://docs.python.org/3/library/venv.html#how-venvs-work +$ source .venv/bin/activate + +# now you can omit the `rye run` prefix +$ python script.py +``` + +### Without Rye + +Alternatively if you don't want to install `Rye`, you can stick with the standard `pip` setup by ensuring you have the Python version specified in `.python-version`, create a virtual environment however you desire and then install dependencies using this command: + +```sh +$ pip install -r requirements-dev.lock +``` ## Modifying/Adding code Most of the SDK is generated code. Modifications to code will be persisted between generations, but may result in merge conflicts between manual patches and changes from the generator. The generator will never -modify the contents of the `src/lib/` and `examples/` directories. +modify the contents of the `src/brapi/lib/` and `examples/` directories. ## Adding and running examples All files in the `examples/` directory are not modified by the generator and can be freely edited or added to. -```ts -// add an example to examples/.ts +```py +# add an example to examples/.py -#!/usr/bin/env -S npm run tsn -T +#!/usr/bin/env -S rye run python … ``` ```sh -$ chmod +x examples/.ts +$ chmod +x examples/.py # run the example against your api -$ yarn tsn -T examples/.ts +$ ./examples/.py ``` ## Using the repository from source @@ -42,25 +62,25 @@ If you’d like to use the repository from source, you can either install from g To install via git: ```sh -$ npm install git+ssh://git@github.com:brapi-dev/brapi-typescript.git +$ pip install git+ssh://git@github.com/brapi-dev/brapi-python.git +``` + +Alternatively, you can build from source and install the wheel file: + +Building this package will create two files in the `dist/` directory, a `.tar.gz` containing the source files and a `.whl` that can be used to install the package efficiently. + +To create a distributable version of the library, all you have to do is run this command: + +```sh +$ rye build +# or +$ python -m build ``` -Alternatively, to link a local copy of the repo: +Then to install: ```sh -# Clone -$ git clone https://www.github.com/brapi-dev/brapi-typescript -$ cd brapi-typescript - -# With yarn -$ yarn link -$ cd ../my-package -$ yarn link brapi - -# With pnpm -$ pnpm link --global -$ cd ../my-package -$ pnpm link -—global brapi +$ pip install ./path-to-wheel-file.whl ``` ## Running tests @@ -68,40 +88,41 @@ $ pnpm link -—global brapi Most tests require you to [set up a mock server](https://github.com/stoplightio/prism) against the OpenAPI spec to run the tests. ```sh +# you will need npm installed $ npx prism mock path/to/your/openapi.yml ``` ```sh -$ yarn run test +$ ./scripts/test ``` ## Linting and formatting -This repository uses [prettier](https://www.npmjs.com/package/prettier) and -[eslint](https://www.npmjs.com/package/eslint) to format the code in the repository. +This repository uses [ruff](https://github.com/astral-sh/ruff) and +[black](https://github.com/psf/black) to format the code in the repository. 
To lint: ```sh -$ yarn lint +$ ./scripts/lint ``` -To format and fix all lint issues automatically: +To format and fix all ruff issues automatically: ```sh -$ yarn fix +$ ./scripts/format ``` ## Publishing and releases -Changes made to this repository via the automated release PR pipeline should publish to npm automatically. If +Changes made to this repository via the automated release PR pipeline should publish to PyPI automatically. If the changes aren't made through the automated pipeline, you may want to make releases manually. ### Publish with a GitHub workflow -You can release to package managers by using [the `Publish NPM` GitHub action](https://www.github.com/brapi-dev/brapi-typescript/actions/workflows/publish-npm.yml). This requires a setup organization or repository secret to be set up. +You can release to package managers by using [the `Publish PyPI` GitHub action](https://www.github.com/brapi-dev/brapi-python/actions/workflows/publish-pypi.yml). This requires a setup organization or repository secret to be set up. ### Publish manually -If you need to manually release a package, you can run the `bin/publish-npm` script with an `NPM_TOKEN` set on +If you need to manually release a package, you can run the `bin/publish-pypi` script with a `PYPI_TOKEN` set on the environment. diff --git a/README.md b/README.md index 624e88c..4d5b6c5 100644 --- a/README.md +++ b/README.md @@ -1,72 +1,148 @@ -# Brapi TypeScript API Library +# Brapi Python API library -[![NPM version]()](https://npmjs.org/package/brapi) ![npm bundle size](https://img.shields.io/bundlephobia/minzip/brapi) - -This library provides convenient access to the Brapi REST API from server-side TypeScript or JavaScript. + +[![PyPI version](https://img.shields.io/pypi/v/brapi.svg?label=pypi%20(stable))](https://pypi.org/project/brapi/) -The REST API documentation can be found on [brapi.dev](https://brapi.dev). The full API of this library can be found in [api.md](api.md). +The Brapi Python library provides convenient access to the Brapi REST API from any Python 3.8+ +application. The library includes type definitions for all request params and response fields, +and offers both synchronous and asynchronous clients powered by [httpx](https://github.com/encode/httpx). It is generated with [Stainless](https://www.stainless.com/). +## Documentation + +The REST API documentation can be found on [brapi.dev](https://brapi.dev). The full API of this library can be found in [api.md](api.md). + ## Installation ```sh -npm install brapi +# install from PyPI +pip install brapi ``` ## Usage The full API of this library can be found in [api.md](api.md). - -```js -import Brapi from 'brapi'; +```python +import os +from brapi import Brapi -const client = new Brapi({ - apiKey: process.env['BRAPI_API_KEY'], // This is the default and can be omitted - environment: 'environment_1', // defaults to 'production' -}); +client = Brapi( + api_key=os.environ.get("BRAPI_API_KEY"), # This is the default and can be omitted + # defaults to "production". + environment="environment_1", +) + +quote = client.quote.retrieve( + tickers="REPLACE_ME", +) +print(quote.requested_at) +``` -const quote = await client.quote.retrieve('REPLACE_ME'); +While you can provide an `api_key` keyword argument, +we recommend using [python-dotenv](https://pypi.org/project/python-dotenv/) +to add `BRAPI_API_KEY="My API Key"` to your `.env` file +so that your API Key is not stored in source control. 
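For illustration, a minimal sketch of that python-dotenv setup (an assumption on top of the README above: `python-dotenv` installed separately and a `.env` file containing `BRAPI_API_KEY` in the working directory; not part of this diff):

```python
# Sketch only: assumes `python-dotenv` is installed and a `.env` file
# with BRAPI_API_KEY="My API Key" exists in the working directory.
from dotenv import load_dotenv

from brapi import Brapi

load_dotenv()  # reads .env and populates os.environ with BRAPI_API_KEY

client = Brapi()  # api_key defaults to the BRAPI_API_KEY environment variable

quote = client.quote.retrieve(tickers="REPLACE_ME")
print(quote.requested_at)
```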
-console.log(quote.requestedAt); +## Async usage + +Simply import `AsyncBrapi` instead of `Brapi` and use `await` with each API call: + +```python +import os +import asyncio +from brapi import AsyncBrapi + +client = AsyncBrapi( + api_key=os.environ.get("BRAPI_API_KEY"), # This is the default and can be omitted + # defaults to "production". + environment="environment_1", +) + + +async def main() -> None: + quote = await client.quote.retrieve( + tickers="REPLACE_ME", + ) + print(quote.requested_at) + + +asyncio.run(main()) ``` -### Request & Response types +Functionality between the synchronous and asynchronous clients is otherwise identical. -This library includes TypeScript definitions for all request params and response fields. You may import and use them like so: +### With aiohttp - -```ts -import Brapi from 'brapi'; +By default, the async client uses `httpx` for HTTP requests. However, for improved concurrency performance you may also use `aiohttp` as the HTTP backend. -const client = new Brapi({ - apiKey: process.env['BRAPI_API_KEY'], // This is the default and can be omitted - environment: 'environment_1', // defaults to 'production' -}); +You can enable this by installing `aiohttp`: -const quote: Brapi.QuoteRetrieveResponse = await client.quote.retrieve('REPLACE_ME'); +```sh +# install from PyPI +pip install brapi[aiohttp] ``` -Documentation for each method, request param, and response field are available in docstrings and will appear on hover in most modern editors. +Then you can enable it by instantiating the client with `http_client=DefaultAioHttpClient()`: + +```python +import asyncio +from brapi import DefaultAioHttpClient +from brapi import AsyncBrapi + + +async def main() -> None: + async with AsyncBrapi( + api_key="My API Key", + http_client=DefaultAioHttpClient(), + ) as client: + quote = await client.quote.retrieve( + tickers="REPLACE_ME", + ) + print(quote.requested_at) + + +asyncio.run(main()) +``` + +## Using types + +Nested request parameters are [TypedDicts](https://docs.python.org/3/library/typing.html#typing.TypedDict). Responses are [Pydantic models](https://docs.pydantic.dev) which also provide helper methods for things like: + +- Serializing back into JSON, `model.to_json()` +- Converting to a dictionary, `model.to_dict()` + +Typed requests and responses provide autocomplete and documentation within your editor. If you would like to see type errors in VS Code to help catch bugs earlier, set `python.analysis.typeCheckingMode` to `basic`. ## Handling errors -When the library is unable to connect to the API, -or if the API returns a non-success status code (i.e., 4xx or 5xx response), -a subclass of `APIError` will be thrown: +When the library is unable to connect to the API (for example, due to network connection problems or a timeout), a subclass of `brapi.APIConnectionError` is raised. - -```ts -const quote = await client.quote.retrieve('REPLACE_ME').catch(async (err) => { - if (err instanceof Brapi.APIError) { - console.log(err.status); // 400 - console.log(err.name); // BadRequestError - console.log(err.headers); // {server: 'nginx', ...} - } else { - throw err; - } -}); +When the API returns a non-success status code (that is, 4xx or 5xx +response), a subclass of `brapi.APIStatusError` is raised, containing `status_code` and `response` properties. + +All errors inherit from `brapi.APIError`. 
+ +```python +import brapi +from brapi import Brapi + +client = Brapi() + +try: + client.quote.retrieve( + tickers="REPLACE_ME", + ) +except brapi.APIConnectionError as e: + print("The server could not be reached") + print(e.__cause__) # an underlying Exception, likely raised within httpx. +except brapi.RateLimitError as e: + print("A 429 status code was received; we should back off a bit.") +except brapi.APIStatusError as e: + print("Another non-200-range status code was received") + print(e.status_code) + print(e.response) ``` Error codes are as follows: @@ -84,247 +160,198 @@ Error codes are as follows: ### Retries -Certain errors will be automatically retried 2 times by default, with a short exponential backoff. +Certain errors are automatically retried 2 times by default, with a short exponential backoff. Connection errors (for example, due to a network connectivity problem), 408 Request Timeout, 409 Conflict, -429 Rate Limit, and >=500 Internal errors will all be retried by default. - -You can use the `maxRetries` option to configure or disable this: +429 Rate Limit, and >=500 Internal errors are all retried by default. - -```js -// Configure the default for all requests: -const client = new Brapi({ - maxRetries: 0, // default is 2 -}); - -// Or, configure per-request: -await client.quote.retrieve('REPLACE_ME', { - maxRetries: 5, -}); -``` +You can use the `max_retries` option to configure or disable retry settings: -### Timeouts +```python +from brapi import Brapi -Requests time out after 1 minute by default. You can configure this with a `timeout` option: +# Configure the default for all requests: +client = Brapi( + # default is 2 + max_retries=0, +) - -```ts -// Configure the default for all requests: -const client = new Brapi({ - timeout: 20 * 1000, // 20 seconds (default is 1 minute) -}); - -// Override per-request: -await client.quote.retrieve('REPLACE_ME', { - timeout: 5 * 1000, -}); +# Or, configure per-request: +client.with_options(max_retries=5).quote.retrieve( + tickers="REPLACE_ME", +) ``` -On timeout, an `APIConnectionTimeoutError` is thrown. - -Note that requests which time out will be [retried twice by default](#retries). - -## Advanced Usage - -### Accessing raw Response data (e.g., headers) +### Timeouts -The "raw" `Response` returned by `fetch()` can be accessed through the `.asResponse()` method on the `APIPromise` type that all methods return. -This method returns as soon as the headers for a successful response are received and does not consume the response body, so you are free to write custom parsing or streaming logic. +By default requests time out after 1 minute. You can configure this with a `timeout` option, +which accepts a float or an [`httpx.Timeout`](https://www.python-httpx.org/advanced/timeouts/#fine-tuning-the-configuration) object: -You can also use the `.withResponse()` method to get the raw `Response` along with the parsed data. -Unlike `.asResponse()` this method consumes the body, returning once it is parsed. 
+```python +from brapi import Brapi - -```ts -const client = new Brapi(); +# Configure the default for all requests: +client = Brapi( + # 20 seconds (default is 1 minute) + timeout=20.0, +) -const response = await client.quote.retrieve('REPLACE_ME').asResponse(); -console.log(response.headers.get('X-My-Header')); -console.log(response.statusText); // access the underlying Response object +# More granular control: +client = Brapi( + timeout=httpx.Timeout(60.0, read=5.0, write=10.0, connect=2.0), +) -const { data: quote, response: raw } = await client.quote.retrieve('REPLACE_ME').withResponse(); -console.log(raw.headers.get('X-My-Header')); -console.log(quote.requestedAt); +# Override per-request: +client.with_options(timeout=5.0).quote.retrieve( + tickers="REPLACE_ME", +) ``` -### Logging +On timeout, an `APITimeoutError` is thrown. -> [!IMPORTANT] -> All log messages are intended for debugging only. The format and content of log messages -> may change between releases. +Note that requests that time out are [retried twice by default](#retries). -#### Log levels +## Advanced -The log level can be configured in two ways: +### Logging -1. Via the `BRAPI_LOG` environment variable -2. Using the `logLevel` client option (overrides the environment variable if set) +We use the standard library [`logging`](https://docs.python.org/3/library/logging.html) module. -```ts -import Brapi from 'brapi'; +You can enable logging by setting the environment variable `BRAPI_LOG` to `info`. -const client = new Brapi({ - logLevel: 'debug', // Show all log messages -}); +```shell +$ export BRAPI_LOG=info ``` -Available log levels, from most to least verbose: +Or to `debug` for more verbose logging. -- `'debug'` - Show debug messages, info, warnings, and errors -- `'info'` - Show info messages, warnings, and errors -- `'warn'` - Show warnings and errors (default) -- `'error'` - Show only errors -- `'off'` - Disable all logging +### How to tell whether `None` means `null` or missing -At the `'debug'` level, all HTTP requests and responses are logged, including headers and bodies. -Some authentication-related headers are redacted, but sensitive data in request and response bodies -may still be visible. +In an API response, a field may be explicitly `null`, or missing entirely; in either case, its value is `None` in this library. You can differentiate the two cases with `.model_fields_set`: -#### Custom logger +```py +if response.my_field is None: + if 'my_field' not in response.model_fields_set: + print('Got json like {}, without a "my_field" key present at all.') + else: + print('Got json like {"my_field": null}.') +``` -By default, this library logs to `globalThis.console`. You can also provide a custom logger. -Most logging libraries are supported, including [pino](https://www.npmjs.com/package/pino), [winston](https://www.npmjs.com/package/winston), [bunyan](https://www.npmjs.com/package/bunyan), [consola](https://www.npmjs.com/package/consola), [signale](https://www.npmjs.com/package/signale), and [@std/log](https://jsr.io/@std/log). If your logger doesn't work, please open an issue. +### Accessing raw response data (e.g. headers) -When providing a custom logger, the `logLevel` option still controls which messages are emitted, messages -below the configured level will not be sent to your logger. 
+The "raw" Response object can be accessed by prefixing `.with_raw_response.` to any HTTP method call, e.g., -```ts -import Brapi from 'brapi'; -import pino from 'pino'; +```py +from brapi import Brapi -const logger = pino(); +client = Brapi() +response = client.quote.with_raw_response.retrieve( + tickers="REPLACE_ME", +) +print(response.headers.get('X-My-Header')) -const client = new Brapi({ - logger: logger.child({ name: 'Brapi' }), - logLevel: 'debug', // Send all messages to pino, allowing it to filter -}); +quote = response.parse() # get the object that `quote.retrieve()` would have returned +print(quote.requested_at) ``` -### Making custom/undocumented requests - -This library is typed for convenient access to the documented API. If you need to access undocumented -endpoints, params, or response properties, the library can still be used. +These methods return an [`APIResponse`](https://github.com/brapi-dev/brapi-python/tree/main/src/brapi/_response.py) object. -#### Undocumented endpoints +The async client returns an [`AsyncAPIResponse`](https://github.com/brapi-dev/brapi-python/tree/main/src/brapi/_response.py) with the same structure, the only difference being `await`able methods for reading the response content. -To make requests to undocumented endpoints, you can use `client.get`, `client.post`, and other HTTP verbs. -Options on the client, such as retries, will be respected when making these requests. +#### `.with_streaming_response` -```ts -await client.post('/some/path', { - body: { some_prop: 'foo' }, - query: { some_query_arg: 'bar' }, -}); -``` +The above interface eagerly reads the full response body when you make the request, which may not always be what you want. -#### Undocumented request params +To stream the response body, use `.with_streaming_response` instead, which requires a context manager and only reads the response body once you call `.read()`, `.text()`, `.json()`, `.iter_bytes()`, `.iter_text()`, `.iter_lines()` or `.parse()`. In the async client, these are async methods. -To make requests using undocumented parameters, you may use `// @ts-expect-error` on the undocumented -parameter. This library doesn't validate at runtime that the request matches the type, so any extra values you -send will be sent as-is. +```python +with client.quote.with_streaming_response.retrieve( + tickers="REPLACE_ME", +) as response: + print(response.headers.get("X-My-Header")) -```ts -client.quote.retrieve({ - // ... - // @ts-expect-error baz is not yet public - baz: 'undocumented option', -}); + for line in response.iter_lines(): + print(line) ``` -For requests with the `GET` verb, any extra params will be in the query, all other requests will send the -extra param in the body. +The context manager is required so that the response will reliably be closed. -If you want to explicitly send an extra argument, you can do so with the `query`, `body`, and `headers` request -options. +### Making custom/undocumented requests -#### Undocumented response properties +This library is typed for convenient access to the documented API. -To access undocumented response properties, you may access the response object with `// @ts-expect-error` on -the response object, or cast the response object to the requisite type. Like the request params, we do not -validate or strip extra properties from the response from the API. +If you need to access undocumented endpoints, params, or response properties, the library can still be used. 
-### Customizing the fetch client +#### Undocumented endpoints -By default, this library expects a global `fetch` function is defined. +To make requests to undocumented endpoints, you can make requests using `client.get`, `client.post`, and other +http verbs. Options on the client will be respected (such as retries) when making this request. -If you want to use a different `fetch` function, you can either polyfill the global: +```py +import httpx -```ts -import fetch from 'my-fetch'; +response = client.post( + "/foo", + cast_to=httpx.Response, + body={"my_param": True}, +) -globalThis.fetch = fetch; +print(response.headers.get("x-foo")) ``` -Or pass it to the client: +#### Undocumented request params -```ts -import Brapi from 'brapi'; -import fetch from 'my-fetch'; +If you want to explicitly send an extra param, you can do so with the `extra_query`, `extra_body`, and `extra_headers` request +options. -const client = new Brapi({ fetch }); -``` +#### Undocumented response properties -### Fetch options +To access undocumented response properties, you can access the extra fields like `response.unknown_prop`. You +can also get all the extra fields on the Pydantic model as a dict with +[`response.model_extra`](https://docs.pydantic.dev/latest/api/base_model/#pydantic.BaseModel.model_extra). -If you want to set custom `fetch` options without overriding the `fetch` function, you can provide a `fetchOptions` object when instantiating the client or making a request. (Request-specific options override client options.) +### Configuring the HTTP client -```ts -import Brapi from 'brapi'; +You can directly override the [httpx client](https://www.python-httpx.org/api/#client) to customize it for your use case, including: -const client = new Brapi({ - fetchOptions: { - // `RequestInit` options - }, -}); -``` +- Support for [proxies](https://www.python-httpx.org/advanced/proxies/) +- Custom [transports](https://www.python-httpx.org/advanced/transports/) +- Additional [advanced](https://www.python-httpx.org/advanced/clients/) functionality -#### Configuring proxies +```python +import httpx +from brapi import Brapi, DefaultHttpxClient -To modify proxy behavior, you can provide custom `fetchOptions` that add runtime-specific proxy -options to requests: - - **Node** [[docs](https://github.com/nodejs/undici/blob/main/docs/docs/api/ProxyAgent.md#example---proxyagent-with-fetch)] +client = Brapi( + # Or use the `BRAPI_BASE_URL` env var + base_url="http://my.test.server.example.com:8083", + http_client=DefaultHttpxClient( + proxy="http://my.test.proxy.example.com", + transport=httpx.HTTPTransport(local_address="0.0.0.0"), + ), +) +``` -```ts -import Brapi from 'brapi'; -import * as undici from 'undici'; +You can also customize the client on a per-request basis by using `with_options()`: -const proxyAgent = new undici.ProxyAgent('http://localhost:8888'); -const client = new Brapi({ - fetchOptions: { - dispatcher: proxyAgent, - }, -}); +```python +client.with_options(http_client=DefaultHttpxClient(...)) ``` - **Bun** [[docs](https://bun.sh/guides/http/proxy)] +### Managing HTTP resources -```ts -import Brapi from 'brapi'; - -const client = new Brapi({ - fetchOptions: { - proxy: 'http://localhost:8888', - }, -}); -``` +By default the library closes underlying HTTP connections whenever the client is [garbage collected](https://docs.python.org/3/reference/datamodel.html#object.__del__). You can manually close the client using the `.close()` method if desired, or with a context manager that closes when exiting. 
- **Deno** [[docs](https://docs.deno.com/api/deno/~/Deno.createHttpClient)] +```py +from brapi import Brapi -```ts -import Brapi from 'npm:brapi'; +with Brapi() as client: + # make requests here + ... -const httpClient = Deno.createHttpClient({ proxy: { url: 'http://localhost:8888' } }); -const client = new Brapi({ - fetchOptions: { - client: httpClient, - }, -}); +# HTTP client is now closed ``` -## Frequently Asked Questions - -## Semantic versioning +## Versioning This package generally follows [SemVer](https://semver.org/spec/v2.0.0.html) conventions, though certain backwards-incompatible changes may be released as minor versions: @@ -334,26 +361,22 @@ This package generally follows [SemVer](https://semver.org/spec/v2.0.0.html) con We take backwards-compatibility seriously and work hard to ensure you can rely on a smooth upgrade experience. -We are keen for your feedback; please open an [issue](https://www.github.com/brapi-dev/brapi-typescript/issues) with questions, bugs, or suggestions. +We are keen for your feedback; please open an [issue](https://www.github.com/brapi-dev/brapi-python/issues) with questions, bugs, or suggestions. -## Requirements +### Determining the installed version -TypeScript >= 4.9 is supported. +If you've upgraded to the latest version but aren't seeing any new features you were expecting then your python environment is likely still using an older version. -The following runtimes are supported: +You can determine the version that is being used at runtime with: -- Web browsers (Up-to-date Chrome, Firefox, Safari, Edge, and more) -- Node.js 20 LTS or later ([non-EOL](https://endoflife.date/nodejs)) versions. -- Deno v1.28.0 or higher. -- Bun 1.0 or later. -- Cloudflare Workers. -- Vercel Edge Runtime. -- Jest 28 or greater with the `"node"` environment (`"jsdom"` is not supported at this time). -- Nitro v2.6 or greater. +```py +import brapi +print(brapi.__version__) +``` -Note that React Native is not supported at this time. +## Requirements -If you are interested in other runtime environments, please open or upvote an issue on GitHub. +Python 3.8 or higher. 
## Contributing diff --git a/api.md b/api.md index 0da5663..5d2bfbf 100644 --- a/api.md +++ b/api.md @@ -2,29 +2,35 @@ Types: -- BalanceSheetEntry -- CashflowEntry -- DefaultKeyStatisticsEntry -- FinancialDataEntry -- IncomeStatementEntry -- ValueAddedEntry -- QuoteRetrieveResponse -- QuoteListResponse +```python +from brapi.types import ( + BalanceSheetEntry, + CashflowEntry, + DefaultKeyStatisticsEntry, + FinancialDataEntry, + IncomeStatementEntry, + ValueAddedEntry, + QuoteRetrieveResponse, + QuoteListResponse, +) +``` Methods: -- client.quote.retrieve(tickers, { ...params }) -> QuoteRetrieveResponse -- client.quote.list({ ...params }) -> QuoteListResponse +- client.quote.retrieve(tickers, \*\*params) -> QuoteRetrieveResponse +- client.quote.list(\*\*params) -> QuoteListResponse # Available Types: -- AvailableListResponse +```python +from brapi.types import AvailableListResponse +``` Methods: -- client.available.list({ ...params }) -> AvailableListResponse +- client.available.list(\*\*params) -> AvailableListResponse # V2 @@ -32,46 +38,50 @@ Methods: Types: -- CryptoRetrieveResponse -- CryptoListAvailableResponse +```python +from brapi.types.v2 import CryptoRetrieveResponse, CryptoListAvailableResponse +``` Methods: -- client.v2.crypto.retrieve({ ...params }) -> CryptoRetrieveResponse -- client.v2.crypto.listAvailable({ ...params }) -> CryptoListAvailableResponse +- client.v2.crypto.retrieve(\*\*params) -> CryptoRetrieveResponse +- client.v2.crypto.list_available(\*\*params) -> CryptoListAvailableResponse ## Currency Types: -- CurrencyRetrieveResponse -- CurrencyListAvailableResponse +```python +from brapi.types.v2 import CurrencyRetrieveResponse, CurrencyListAvailableResponse +``` Methods: -- client.v2.currency.retrieve({ ...params }) -> CurrencyRetrieveResponse -- client.v2.currency.listAvailable({ ...params }) -> CurrencyListAvailableResponse +- client.v2.currency.retrieve(\*\*params) -> CurrencyRetrieveResponse +- client.v2.currency.list_available(\*\*params) -> CurrencyListAvailableResponse ## Inflation Types: -- InflationRetrieveResponse -- InflationListAvailableResponse +```python +from brapi.types.v2 import InflationRetrieveResponse, InflationListAvailableResponse +``` Methods: -- client.v2.inflation.retrieve({ ...params }) -> InflationRetrieveResponse -- client.v2.inflation.listAvailable({ ...params }) -> InflationListAvailableResponse +- client.v2.inflation.retrieve(\*\*params) -> InflationRetrieveResponse +- client.v2.inflation.list_available(\*\*params) -> InflationListAvailableResponse ## PrimeRate Types: -- PrimeRateRetrieveResponse -- PrimeRateListAvailableResponse +```python +from brapi.types.v2 import PrimeRateRetrieveResponse, PrimeRateListAvailableResponse +``` Methods: -- client.v2.primeRate.retrieve({ ...params }) -> PrimeRateRetrieveResponse -- client.v2.primeRate.listAvailable({ ...params }) -> PrimeRateListAvailableResponse +- client.v2.prime_rate.retrieve(\*\*params) -> PrimeRateRetrieveResponse +- client.v2.prime_rate.list_available(\*\*params) -> PrimeRateListAvailableResponse diff --git a/bin/check-release-environment b/bin/check-release-environment index e4b6d58..b845b0f 100644 --- a/bin/check-release-environment +++ b/bin/check-release-environment @@ -2,8 +2,8 @@ errors=() -if [ -z "${NPM_TOKEN}" ]; then - errors+=("The NPM_TOKEN secret has not been set. Please set it in either this repository's secrets or your organization secrets") +if [ -z "${PYPI_TOKEN}" ]; then + errors+=("The PYPI_TOKEN secret has not been set. 
Please set it in either this repository's secrets or your organization secrets.") fi lenErrors=${#errors[@]} @@ -19,4 +19,3 @@ if [[ lenErrors -gt 0 ]]; then fi echo "The environment is ready to push releases!" - diff --git a/bin/publish-npm b/bin/publish-npm deleted file mode 100644 index 45e8aa8..0000000 --- a/bin/publish-npm +++ /dev/null @@ -1,61 +0,0 @@ -#!/usr/bin/env bash - -set -eux - -npm config set '//registry.npmjs.org/:_authToken' "$NPM_TOKEN" - -yarn build -cd dist - -# Get package name and version from package.json -PACKAGE_NAME="$(jq -r -e '.name' ./package.json)" -VERSION="$(jq -r -e '.version' ./package.json)" - -# Get latest version from npm -# -# If the package doesn't exist, npm will return: -# { -# "error": { -# "code": "E404", -# "summary": "Unpublished on 2025-06-05T09:54:53.528Z", -# "detail": "'the_package' is not in this registry..." -# } -# } -NPM_INFO="$(npm view "$PACKAGE_NAME" version --json 2>/dev/null || true)" - -# Check if we got an E404 error -if echo "$NPM_INFO" | jq -e '.error.code == "E404"' > /dev/null 2>&1; then - # Package doesn't exist yet, no last version - LAST_VERSION="" -elif echo "$NPM_INFO" | jq -e '.error' > /dev/null 2>&1; then - # Report other errors - echo "ERROR: npm returned unexpected data:" - echo "$NPM_INFO" - exit 1 -else - # Success - get the version - LAST_VERSION=$(echo "$NPM_INFO" | jq -r '.') # strip quotes -fi - -# Check if current version is pre-release (e.g. alpha / beta / rc) -CURRENT_IS_PRERELEASE=false -if [[ "$VERSION" =~ -([a-zA-Z]+) ]]; then - CURRENT_IS_PRERELEASE=true - CURRENT_TAG="${BASH_REMATCH[1]}" -fi - -# Check if last version is a stable release -LAST_IS_STABLE_RELEASE=true -if [[ -z "$LAST_VERSION" || "$LAST_VERSION" =~ -([a-zA-Z]+) ]]; then - LAST_IS_STABLE_RELEASE=false -fi - -# Use a corresponding alpha/beta tag if there already is a stable release and we're publishing a prerelease. 
-if $CURRENT_IS_PRERELEASE && $LAST_IS_STABLE_RELEASE; then - TAG="$CURRENT_TAG" -else - TAG="latest" -fi - -# Publish with the appropriate tag -yarn publish --tag "$TAG" diff --git a/bin/publish-pypi b/bin/publish-pypi new file mode 100644 index 0000000..826054e --- /dev/null +++ b/bin/publish-pypi @@ -0,0 +1,6 @@ +#!/usr/bin/env bash + +set -eux +mkdir -p dist +rye build --clean +rye publish --yes --token=$PYPI_TOKEN diff --git a/eslint.config.mjs b/eslint.config.mjs deleted file mode 100644 index 5b507c5..0000000 --- a/eslint.config.mjs +++ /dev/null @@ -1,42 +0,0 @@ -// @ts-check -import tseslint from 'typescript-eslint'; -import unusedImports from 'eslint-plugin-unused-imports'; -import prettier from 'eslint-plugin-prettier'; - -export default tseslint.config( - { - languageOptions: { - parser: tseslint.parser, - parserOptions: { sourceType: 'module' }, - }, - files: ['**/*.ts', '**/*.mts', '**/*.cts', '**/*.js', '**/*.mjs', '**/*.cjs'], - ignores: ['dist/'], - plugins: { - '@typescript-eslint': tseslint.plugin, - 'unused-imports': unusedImports, - prettier, - }, - rules: { - 'no-unused-vars': 'off', - 'prettier/prettier': 'error', - 'unused-imports/no-unused-imports': 'error', - 'no-restricted-imports': [ - 'error', - { - patterns: [ - { - regex: '^brapi(/.*)?', - message: 'Use a relative import, not a package import.', - }, - ], - }, - ], - }, - }, - { - files: ['tests/**', 'examples/**'], - rules: { - 'no-restricted-imports': 'off', - }, - }, -); diff --git a/examples/.keep b/examples/.keep index 0651c89..d8c73e9 100644 --- a/examples/.keep +++ b/examples/.keep @@ -1,4 +1,4 @@ File generated from our OpenAPI spec by Stainless. This directory can be used to store example files demonstrating usage of this SDK. -It is ignored by Stainless code generation and its content (other than this keep file) won't be touched. +It is ignored by Stainless code generation and its content (other than this keep file) won't be touched. 
\ No newline at end of file diff --git a/jest.config.ts b/jest.config.ts deleted file mode 100644 index d2dff7e..0000000 --- a/jest.config.ts +++ /dev/null @@ -1,23 +0,0 @@ -import type { JestConfigWithTsJest } from 'ts-jest'; - -const config: JestConfigWithTsJest = { - preset: 'ts-jest/presets/default-esm', - testEnvironment: 'node', - transform: { - '^.+\\.(t|j)sx?$': ['@swc/jest', { sourceMaps: 'inline' }], - }, - moduleNameMapper: { - '^brapi$': '/src/index.ts', - '^brapi/(.*)$': '/src/$1', - }, - modulePathIgnorePatterns: [ - '/ecosystem-tests/', - '/dist/', - '/deno/', - '/deno_tests/', - '/packages/', - ], - testPathIgnorePatterns: ['scripts'], -}; - -export default config; diff --git a/noxfile.py b/noxfile.py new file mode 100644 index 0000000..53bca7f --- /dev/null +++ b/noxfile.py @@ -0,0 +1,9 @@ +import nox + + +@nox.session(reuse_venv=True, name="test-pydantic-v1") +def test_pydantic_v1(session: nox.Session) -> None: + session.install("-r", "requirements-dev.lock") + session.install("pydantic<2") + + session.run("pytest", "--showlocals", "--ignore=tests/functional", *session.posargs) diff --git a/package.json b/package.json deleted file mode 100644 index 164fae7..0000000 --- a/package.json +++ /dev/null @@ -1,69 +0,0 @@ -{ - "name": "brapi", - "version": "1.0.0", - "description": "The official TypeScript library for the Brapi API", - "author": "Brapi ", - "types": "dist/index.d.ts", - "main": "dist/index.js", - "type": "commonjs", - "repository": "github:brapi-dev/brapi-typescript", - "license": "Apache-2.0", - "packageManager": "yarn@1.22.22", - "files": [ - "**/*" - ], - "private": false, - "publishConfig": { - "access": "public" - }, - "scripts": { - "test": "./scripts/test", - "build": "./scripts/build", - "prepublishOnly": "echo 'to publish, run yarn build && (cd dist; yarn publish)' && exit 1", - "format": "./scripts/format", - "prepare": "if ./scripts/utils/check-is-in-git-install.sh; then ./scripts/build && ./scripts/utils/git-swap.sh; fi", - "tsn": "ts-node -r tsconfig-paths/register", - "lint": "./scripts/lint", - "fix": "./scripts/format" - }, - "dependencies": {}, - "devDependencies": { - "@arethetypeswrong/cli": "^0.17.0", - "@swc/core": "^1.3.102", - "@swc/jest": "^0.2.29", - "@types/jest": "^29.4.0", - "@types/node": "^20.17.6", - "@typescript-eslint/eslint-plugin": "8.31.1", - "@typescript-eslint/parser": "8.31.1", - "eslint": "^9.20.1", - "eslint-plugin-prettier": "^5.4.1", - "eslint-plugin-unused-imports": "^4.1.4", - "iconv-lite": "^0.6.3", - "jest": "^29.4.0", - "prettier": "^3.0.0", - "publint": "^0.2.12", - "ts-jest": "^29.1.0", - "ts-node": "^10.5.0", - "tsc-multi": "https://github.com/stainless-api/tsc-multi/releases/download/v1.1.9/tsc-multi.tgz", - "tsconfig-paths": "^4.0.0", - "tslib": "^2.8.1", - "typescript": "5.8.3", - "typescript-eslint": "8.31.1" - }, - "exports": { - ".": { - "import": "./dist/index.mjs", - "require": "./dist/index.js" - }, - "./*.mjs": { - "default": "./dist/*.mjs" - }, - "./*.js": { - "default": "./dist/*.js" - }, - "./*": { - "import": "./dist/*.mjs", - "require": "./dist/*.js" - } - } -} diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..86d0287 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,267 @@ +[project] +name = "brapi" +version = "1.0.0" +description = "The official Python library for the brapi API" +dynamic = ["readme"] +license = "Apache-2.0" +authors = [ +{ name = "Brapi", email = "contact@brapi.dev" }, +] +dependencies = [ + "httpx>=0.23.0, <1", + "pydantic>=1.9.0, <3", + 
"typing-extensions>=4.10, <5", + "anyio>=3.5.0, <5", + "distro>=1.7.0, <2", + "sniffio", +] +requires-python = ">= 3.8" +classifiers = [ + "Typing :: Typed", + "Intended Audience :: Developers", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", + "Operating System :: OS Independent", + "Operating System :: POSIX", + "Operating System :: MacOS", + "Operating System :: POSIX :: Linux", + "Operating System :: Microsoft :: Windows", + "Topic :: Software Development :: Libraries :: Python Modules", + "License :: OSI Approved :: Apache Software License" +] + +[project.urls] +Homepage = "https://github.com/brapi-dev/brapi-python" +Repository = "https://github.com/brapi-dev/brapi-python" + +[project.optional-dependencies] +aiohttp = ["aiohttp", "httpx_aiohttp>=0.1.8"] + +[tool.rye] +managed = true +# version pins are in requirements-dev.lock +dev-dependencies = [ + "pyright==1.1.399", + "mypy", + "respx", + "pytest", + "pytest-asyncio", + "ruff", + "time-machine", + "nox", + "dirty-equals>=0.6.0", + "importlib-metadata>=6.7.0", + "rich>=13.7.1", + "pytest-xdist>=3.6.1", +] + +[tool.rye.scripts] +format = { chain = [ + "format:ruff", + "format:docs", + "fix:ruff", + # run formatting again to fix any inconsistencies when imports are stripped + "format:ruff", +]} +"format:docs" = "python scripts/utils/ruffen-docs.py README.md api.md" +"format:ruff" = "ruff format" + +"lint" = { chain = [ + "check:ruff", + "typecheck", + "check:importable", +]} +"check:ruff" = "ruff check ." +"fix:ruff" = "ruff check --fix ." + +"check:importable" = "python -c 'import brapi'" + +typecheck = { chain = [ + "typecheck:pyright", + "typecheck:mypy" +]} +"typecheck:pyright" = "pyright" +"typecheck:verify-types" = "pyright --verifytypes brapi --ignoreexternal" +"typecheck:mypy" = "mypy ." + +[build-system] +requires = ["hatchling==1.26.3", "hatch-fancy-pypi-readme"] +build-backend = "hatchling.build" + +[tool.hatch.build] +include = [ + "src/*" +] + +[tool.hatch.build.targets.wheel] +packages = ["src/brapi"] + +[tool.hatch.build.targets.sdist] +# Basically everything except hidden files/directories (such as .github, .devcontainers, .python-version, etc) +include = [ + "/*.toml", + "/*.json", + "/*.lock", + "/*.md", + "/mypy.ini", + "/noxfile.py", + "bin/*", + "examples/*", + "src/*", + "tests/*", +] + +[tool.hatch.metadata.hooks.fancy-pypi-readme] +content-type = "text/markdown" + +[[tool.hatch.metadata.hooks.fancy-pypi-readme.fragments]] +path = "README.md" + +[[tool.hatch.metadata.hooks.fancy-pypi-readme.substitutions]] +# replace relative links with absolute links +pattern = '\[(.+?)\]\(((?!https?://)\S+?)\)' +replacement = '[\1](https://github.com/brapi-dev/brapi-python/tree/main/\g<2>)' + +[tool.pytest.ini_options] +testpaths = ["tests"] +addopts = "--tb=short -n auto" +xfail_strict = true +asyncio_mode = "auto" +asyncio_default_fixture_loop_scope = "session" +filterwarnings = [ + "error" +] + +[tool.pyright] +# this enables practically every flag given by pyright. +# there are a couple of flags that are still disabled by +# default in strict mode as they are experimental and niche. 
+typeCheckingMode = "strict" +pythonVersion = "3.8" + +exclude = [ + "_dev", + ".venv", + ".nox", + ".git", +] + +reportImplicitOverride = true +reportOverlappingOverload = false + +reportImportCycles = false +reportPrivateUsage = false + +[tool.mypy] +pretty = true +show_error_codes = true + +# Exclude _files.py because mypy isn't smart enough to apply +# the correct type narrowing and as this is an internal module +# it's fine to just use Pyright. +# +# We also exclude our `tests` as mypy doesn't always infer +# types correctly and Pyright will still catch any type errors. +exclude = ['src/brapi/_files.py', '_dev/.*.py', 'tests/.*'] + +strict_equality = true +implicit_reexport = true +check_untyped_defs = true +no_implicit_optional = true + +warn_return_any = true +warn_unreachable = true +warn_unused_configs = true + +# Turn these options off as it could cause conflicts +# with the Pyright options. +warn_unused_ignores = false +warn_redundant_casts = false + +disallow_any_generics = true +disallow_untyped_defs = true +disallow_untyped_calls = true +disallow_subclassing_any = true +disallow_incomplete_defs = true +disallow_untyped_decorators = true +cache_fine_grained = true + +# By default, mypy reports an error if you assign a value to the result +# of a function call that doesn't return anything. We do this in our test +# cases: +# ``` +# result = ... +# assert result is None +# ``` +# Changing this codegen to make mypy happy would increase complexity +# and would not be worth it. +disable_error_code = "func-returns-value,overload-cannot-match" + +# https://github.com/python/mypy/issues/12162 +[[tool.mypy.overrides]] +module = "black.files.*" +ignore_errors = true +ignore_missing_imports = true + + +[tool.ruff] +line-length = 120 +output-format = "grouped" +target-version = "py38" + +[tool.ruff.format] +docstring-code-format = true + +[tool.ruff.lint] +select = [ + # isort + "I", + # bugbear rules + "B", + # remove unused imports + "F401", + # check for missing future annotations + "FA102", + # bare except statements + "E722", + # unused arguments + "ARG", + # print statements + "T201", + "T203", + # misuse of typing.TYPE_CHECKING + "TC004", + # import rules + "TID251", +] +ignore = [ + # mutable defaults + "B006", +] +unfixable = [ + # disable auto fix for print statements + "T201", + "T203", +] + +extend-safe-fixes = ["FA102"] + +[tool.ruff.lint.flake8-tidy-imports.banned-api] +"functools.lru_cache".msg = "This function does not retain type information for the wrapped function's arguments; The `lru_cache` function from `_utils` should be used instead" + +[tool.ruff.lint.isort] +length-sort = true +length-sort-straight = true +combine-as-imports = true +extra-standard-library = ["typing_extensions"] +known-first-party = ["brapi", "tests"] + +[tool.ruff.lint.per-file-ignores] +"bin/**.py" = ["T201", "T203"] +"scripts/**.py" = ["T201", "T203"] +"tests/**.py" = ["T201", "T203"] +"examples/**.py" = ["T201", "T203"] diff --git a/release-please-config.json b/release-please-config.json index 1ebd0bd..f365cd0 100644 --- a/release-please-config.json +++ b/release-please-config.json @@ -59,6 +59,8 @@ "hidden": true } ], - "release-type": "node", - "extra-files": ["src/version.ts", "README.md"] -} + "release-type": "python", + "extra-files": [ + "src/brapi/_version.py" + ] +} \ No newline at end of file diff --git a/requirements-dev.lock b/requirements-dev.lock new file mode 100644 index 0000000..6f06451 --- /dev/null +++ b/requirements-dev.lock @@ -0,0 +1,137 @@ +# generated by rye +# use `rye 
lock` or `rye sync` to update this lockfile +# +# last locked with the following flags: +# pre: false +# features: [] +# all-features: true +# with-sources: false +# generate-hashes: false +# universal: false + +-e file:. +aiohappyeyeballs==2.6.1 + # via aiohttp +aiohttp==3.12.8 + # via brapi + # via httpx-aiohttp +aiosignal==1.3.2 + # via aiohttp +annotated-types==0.6.0 + # via pydantic +anyio==4.4.0 + # via brapi + # via httpx +argcomplete==3.1.2 + # via nox +async-timeout==5.0.1 + # via aiohttp +attrs==25.3.0 + # via aiohttp +certifi==2023.7.22 + # via httpcore + # via httpx +colorlog==6.7.0 + # via nox +dirty-equals==0.6.0 +distlib==0.3.7 + # via virtualenv +distro==1.8.0 + # via brapi +exceptiongroup==1.2.2 + # via anyio + # via pytest +execnet==2.1.1 + # via pytest-xdist +filelock==3.12.4 + # via virtualenv +frozenlist==1.6.2 + # via aiohttp + # via aiosignal +h11==0.16.0 + # via httpcore +httpcore==1.0.9 + # via httpx +httpx==0.28.1 + # via brapi + # via httpx-aiohttp + # via respx +httpx-aiohttp==0.1.8 + # via brapi +idna==3.4 + # via anyio + # via httpx + # via yarl +importlib-metadata==7.0.0 +iniconfig==2.0.0 + # via pytest +markdown-it-py==3.0.0 + # via rich +mdurl==0.1.2 + # via markdown-it-py +multidict==6.4.4 + # via aiohttp + # via yarl +mypy==1.14.1 +mypy-extensions==1.0.0 + # via mypy +nodeenv==1.8.0 + # via pyright +nox==2023.4.22 +packaging==23.2 + # via nox + # via pytest +platformdirs==3.11.0 + # via virtualenv +pluggy==1.5.0 + # via pytest +propcache==0.3.1 + # via aiohttp + # via yarl +pydantic==2.11.9 + # via brapi +pydantic-core==2.33.2 + # via pydantic +pygments==2.18.0 + # via rich +pyright==1.1.399 +pytest==8.3.3 + # via pytest-asyncio + # via pytest-xdist +pytest-asyncio==0.24.0 +pytest-xdist==3.7.0 +python-dateutil==2.8.2 + # via time-machine +pytz==2023.3.post1 + # via dirty-equals +respx==0.22.0 +rich==13.7.1 +ruff==0.9.4 +setuptools==68.2.2 + # via nodeenv +six==1.16.0 + # via python-dateutil +sniffio==1.3.0 + # via anyio + # via brapi +time-machine==2.9.0 +tomli==2.0.2 + # via mypy + # via pytest +typing-extensions==4.12.2 + # via anyio + # via brapi + # via multidict + # via mypy + # via pydantic + # via pydantic-core + # via pyright + # via typing-inspection +typing-inspection==0.4.1 + # via pydantic +virtualenv==20.24.5 + # via nox +yarl==1.20.0 + # via aiohttp +zipp==3.17.0 + # via importlib-metadata diff --git a/requirements.lock b/requirements.lock new file mode 100644 index 0000000..6018e55 --- /dev/null +++ b/requirements.lock @@ -0,0 +1,75 @@ +# generated by rye +# use `rye lock` or `rye sync` to update this lockfile +# +# last locked with the following flags: +# pre: false +# features: [] +# all-features: true +# with-sources: false +# generate-hashes: false +# universal: false + +-e file:. 
+aiohappyeyeballs==2.6.1 + # via aiohttp +aiohttp==3.12.8 + # via brapi + # via httpx-aiohttp +aiosignal==1.3.2 + # via aiohttp +annotated-types==0.6.0 + # via pydantic +anyio==4.4.0 + # via brapi + # via httpx +async-timeout==5.0.1 + # via aiohttp +attrs==25.3.0 + # via aiohttp +certifi==2023.7.22 + # via httpcore + # via httpx +distro==1.8.0 + # via brapi +exceptiongroup==1.2.2 + # via anyio +frozenlist==1.6.2 + # via aiohttp + # via aiosignal +h11==0.16.0 + # via httpcore +httpcore==1.0.9 + # via httpx +httpx==0.28.1 + # via brapi + # via httpx-aiohttp +httpx-aiohttp==0.1.8 + # via brapi +idna==3.4 + # via anyio + # via httpx + # via yarl +multidict==6.4.4 + # via aiohttp + # via yarl +propcache==0.3.1 + # via aiohttp + # via yarl +pydantic==2.11.9 + # via brapi +pydantic-core==2.33.2 + # via pydantic +sniffio==1.3.0 + # via anyio + # via brapi +typing-extensions==4.12.2 + # via anyio + # via brapi + # via multidict + # via pydantic + # via pydantic-core + # via typing-inspection +typing-inspection==0.4.1 + # via pydantic +yarl==1.20.0 + # via aiohttp diff --git a/scripts/bootstrap b/scripts/bootstrap index a8b69ff..b430fee 100755 --- a/scripts/bootstrap +++ b/scripts/bootstrap @@ -19,8 +19,9 @@ if [ -f "Brewfile" ] && [ "$(uname -s)" = "Darwin" ] && [ "$SKIP_BREW" != "1" ] } fi -echo "==> Installing Node dependencies…" +echo "==> Installing Python dependencies…" -PACKAGE_MANAGER=$(command -v yarn >/dev/null 2>&1 && echo "yarn" || echo "npm") +# experimental uv support makes installations significantly faster +rye config --set-bool behavior.use-uv=true -$PACKAGE_MANAGER install "$@" +rye sync --all-features diff --git a/scripts/build b/scripts/build deleted file mode 100755 index c02a641..0000000 --- a/scripts/build +++ /dev/null @@ -1,51 +0,0 @@ -#!/usr/bin/env bash - -set -exuo pipefail - -cd "$(dirname "$0")/.." - -node scripts/utils/check-version.cjs - -# Build into dist and will publish the package from there, -# so that src/resources/foo.ts becomes /resources/foo.js -# This way importing from `"brapi/resources/foo"` works -# even with `"moduleResolution": "node"` - -rm -rf dist; mkdir dist -# Copy src to dist/src and build from dist/src into dist, so that -# the source map for index.js.map will refer to ./src/index.ts etc -cp -rp src README.md dist -for file in LICENSE CHANGELOG.md; do - if [ -e "${file}" ]; then cp "${file}" dist; fi -done -if [ -e "bin/cli" ]; then - mkdir -p dist/bin - cp -p "bin/cli" dist/bin/; -fi -if [ -e "bin/migration-config.json" ]; then - mkdir -p dist/bin - cp -p "bin/migration-config.json" dist/bin/; -fi -# this converts the export map paths for the dist directory -# and does a few other minor things -node scripts/utils/make-dist-package-json.cjs > dist/package.json - -# build to .js/.mjs/.d.ts files -./node_modules/.bin/tsc-multi -# we need to patch index.js so that `new module.exports()` works for cjs backwards -# compat. 
No way to get that from index.ts because it would cause compile errors -# when building .mjs -node scripts/utils/fix-index-exports.cjs -cp tsconfig.dist-src.json dist/src/tsconfig.json - -node scripts/utils/postprocess-files.cjs - -# make sure that nothing crashes when we require the output CJS or -# import the output ESM -(cd dist && node -e 'require("brapi")') -(cd dist && node -e 'import("brapi")' --input-type=module) - -if [ -e ./scripts/build-deno ] -then - ./scripts/build-deno -fi diff --git a/scripts/fast-format b/scripts/fast-format deleted file mode 100755 index 53721ac..0000000 --- a/scripts/fast-format +++ /dev/null @@ -1,40 +0,0 @@ -#!/usr/bin/env bash - -set -euo pipefail - -echo "Script started with $# arguments" -echo "Arguments: $*" -echo "Script location: $(dirname "$0")" - -cd "$(dirname "$0")/.." -echo "Changed to directory: $(pwd)" - -if [ $# -eq 0 ]; then - echo "Usage: $0 [additional-formatter-args...]" - echo "The file should contain one file path per line" - exit 1 -fi - -FILE_LIST="$1" - -echo "Looking for file: $FILE_LIST" - -if [ ! -f "$FILE_LIST" ]; then - echo "Error: File '$FILE_LIST' not found" - exit 1 -fi - -echo "==> Running eslint --fix" -ESLINT_FILES="$(grep '\.ts$' "$FILE_LIST" || true)" -if ! [ -z "$ESLINT_FILES" ]; then - echo "$ESLINT_FILES" | xargs ./node_modules/.bin/eslint --cache --fix -fi - -echo "==> Running prettier --write" -# format things eslint didn't -PRETTIER_FILES="$(grep '\.\(js\|json\)$' "$FILE_LIST" || true)" -if ! [ -z "$PRETTIER_FILES" ]; then - echo "$PRETTIER_FILES" | xargs ./node_modules/.bin/prettier \ - --write --cache --cache-strategy metadata --no-error-on-unmatched-pattern \ - '!**/dist' '!**/*.ts' '!**/*.mts' '!**/*.cts' '!**/*.js' '!**/*.mjs' '!**/*.cjs' -fi diff --git a/scripts/format b/scripts/format index 7a75640..667ec2d 100755 --- a/scripts/format +++ b/scripts/format @@ -4,9 +4,5 @@ set -e cd "$(dirname "$0")/.." -echo "==> Running eslint --fix" -./node_modules/.bin/eslint --fix . - -echo "==> Running prettier --write" -# format things eslint didn't -./node_modules/.bin/prettier --write --cache --cache-strategy metadata . '!**/dist' '!**/*.ts' '!**/*.mts' '!**/*.cts' '!**/*.js' '!**/*.mjs' '!**/*.cjs' +echo "==> Running formatters" +rye run format diff --git a/scripts/lint b/scripts/lint index 3ffb78a..e8935a5 100755 --- a/scripts/lint +++ b/scripts/lint @@ -4,18 +4,8 @@ set -e cd "$(dirname "$0")/.." -echo "==> Running eslint" -./node_modules/.bin/eslint . +echo "==> Running lints" +rye run lint -echo "==> Building" -./scripts/build - -echo "==> Checking types" -./node_modules/typescript/bin/tsc - -echo "==> Running Are The Types Wrong?" 
-./node_modules/.bin/attw --pack dist -f json >.attw.json || true -node scripts/utils/attw-report.cjs - -echo "==> Running publint" -./node_modules/.bin/publint dist +echo "==> Making sure it imports" +rye run python -c 'import brapi' diff --git a/scripts/test b/scripts/test index 7bce051..dbeda2d 100755 --- a/scripts/test +++ b/scripts/test @@ -52,5 +52,10 @@ else echo fi +export DEFER_PYDANTIC_BUILD=false + echo "==> Running tests" -./node_modules/.bin/jest "$@" +rye run pytest "$@" + +echo "==> Running Pydantic v1 tests" +rye run nox -s test-pydantic-v1 -- "$@" diff --git a/scripts/utils/attw-report.cjs b/scripts/utils/attw-report.cjs deleted file mode 100644 index b3477c0..0000000 --- a/scripts/utils/attw-report.cjs +++ /dev/null @@ -1,24 +0,0 @@ -const fs = require('fs'); -const problems = Object.values(JSON.parse(fs.readFileSync('.attw.json', 'utf-8')).problems) - .flat() - .filter( - (problem) => - !( - // This is intentional, if the user specifies .mjs they get ESM. - ( - (problem.kind === 'CJSResolvesToESM' && problem.entrypoint.endsWith('.mjs')) || - // This is intentional for backwards compat reasons. - (problem.kind === 'MissingExportEquals' && problem.implementationFileName.endsWith('/index.js')) || - // this is intentional, we deliberately attempt to import types that may not exist from parent node_modules - // folders to better support various runtimes without triggering automatic type acquisition. - (problem.kind === 'InternalResolutionError' && problem.moduleSpecifier.includes('node_modules')) - ) - ), - ); -fs.unlinkSync('.attw.json'); -if (problems.length) { - process.stdout.write('The types are wrong!\n' + JSON.stringify(problems, null, 2) + '\n'); - process.exitCode = 1; -} else { - process.stdout.write('Types ok!\n'); -} diff --git a/scripts/utils/check-is-in-git-install.sh b/scripts/utils/check-is-in-git-install.sh deleted file mode 100755 index 1354eb4..0000000 --- a/scripts/utils/check-is-in-git-install.sh +++ /dev/null @@ -1,9 +0,0 @@ -#!/usr/bin/env bash -# Check if you happen to call prepare for a repository that's already in node_modules. 
-[ "$(basename "$(dirname "$PWD")")" = 'node_modules' ] || -# The name of the containing directory that 'npm` uses, which looks like -# $HOME/.npm/_cacache/git-cloneXXXXXX -[ "$(basename "$(dirname "$PWD")")" = 'tmp' ] || -# The name of the containing directory that 'yarn` uses, which looks like -# $(yarn cache dir)/.tmp/XXXXX -[ "$(basename "$(dirname "$PWD")")" = '.tmp' ] diff --git a/scripts/utils/check-version.cjs b/scripts/utils/check-version.cjs deleted file mode 100644 index 86c56df..0000000 --- a/scripts/utils/check-version.cjs +++ /dev/null @@ -1,20 +0,0 @@ -const fs = require('fs'); -const path = require('path'); - -const main = () => { - const pkg = require('../../package.json'); - const version = pkg['version']; - if (!version) throw 'The version property is not set in the package.json file'; - if (typeof version !== 'string') { - throw `Unexpected type for the package.json version field; got ${typeof version}, expected string`; - } - - const versionFile = path.resolve(__dirname, '..', '..', 'src', 'version.ts'); - const contents = fs.readFileSync(versionFile, 'utf8'); - const output = contents.replace(/(export const VERSION = ')(.*)(')/g, `$1${version}$3`); - fs.writeFileSync(versionFile, output); -}; - -if (require.main === module) { - main(); -} diff --git a/scripts/utils/fix-index-exports.cjs b/scripts/utils/fix-index-exports.cjs deleted file mode 100644 index e5e10b3..0000000 --- a/scripts/utils/fix-index-exports.cjs +++ /dev/null @@ -1,17 +0,0 @@ -const fs = require('fs'); -const path = require('path'); - -const indexJs = - process.env['DIST_PATH'] ? - path.resolve(process.env['DIST_PATH'], 'index.js') - : path.resolve(__dirname, '..', '..', 'dist', 'index.js'); - -let before = fs.readFileSync(indexJs, 'utf8'); -let after = before.replace( - /^(\s*Object\.defineProperty\s*\(exports,\s*["']__esModule["'].+)$/m, - `exports = module.exports = function (...args) { - return new exports.default(...args) - } - $1`.replace(/^ /gm, ''), -); -fs.writeFileSync(indexJs, after, 'utf8'); diff --git a/scripts/utils/git-swap.sh b/scripts/utils/git-swap.sh deleted file mode 100755 index 79d1888..0000000 --- a/scripts/utils/git-swap.sh +++ /dev/null @@ -1,13 +0,0 @@ -#!/usr/bin/env bash -set -exuo pipefail -# the package is published to NPM from ./dist -# we want the final file structure for git installs to match the npm installs, so we - -# delete everything except ./dist and ./node_modules -find . -maxdepth 1 -mindepth 1 ! -name 'dist' ! -name 'node_modules' -exec rm -rf '{}' + - -# move everything from ./dist to . -mv dist/* . 
- -# delete the now-empty ./dist -rmdir dist diff --git a/scripts/utils/make-dist-package-json.cjs b/scripts/utils/make-dist-package-json.cjs deleted file mode 100644 index 7c24f56..0000000 --- a/scripts/utils/make-dist-package-json.cjs +++ /dev/null @@ -1,21 +0,0 @@ -const pkgJson = require(process.env['PKG_JSON_PATH'] || '../../package.json'); - -function processExportMap(m) { - for (const key in m) { - const value = m[key]; - if (typeof value === 'string') m[key] = value.replace(/^\.\/dist\//, './'); - else processExportMap(value); - } -} -processExportMap(pkgJson.exports); - -for (const key of ['types', 'main', 'module']) { - if (typeof pkgJson[key] === 'string') pkgJson[key] = pkgJson[key].replace(/^(\.\/)?dist\//, './'); -} - -delete pkgJson.devDependencies; -delete pkgJson.scripts.prepack; -delete pkgJson.scripts.prepublishOnly; -delete pkgJson.scripts.prepare; - -console.log(JSON.stringify(pkgJson, null, 2)); diff --git a/scripts/utils/postprocess-files.cjs b/scripts/utils/postprocess-files.cjs deleted file mode 100644 index deae575..0000000 --- a/scripts/utils/postprocess-files.cjs +++ /dev/null @@ -1,94 +0,0 @@ -// @ts-check -const fs = require('fs'); -const path = require('path'); - -const distDir = - process.env['DIST_PATH'] ? - path.resolve(process.env['DIST_PATH']) - : path.resolve(__dirname, '..', '..', 'dist'); - -async function* walk(dir) { - for await (const d of await fs.promises.opendir(dir)) { - const entry = path.join(dir, d.name); - if (d.isDirectory()) yield* walk(entry); - else if (d.isFile()) yield entry; - } -} - -async function postprocess() { - for await (const file of walk(distDir)) { - if (!/(\.d)?[cm]?ts$/.test(file)) continue; - - const code = await fs.promises.readFile(file, 'utf8'); - - // strip out lib="dom", types="node", and types="react" references; these - // are needed at build time, but would pollute the user's TS environment - const transformed = code.replace( - /^ *\/\/\/ * ' '.repeat(match.length - 1) + '\n', - ); - - if (transformed !== code) { - console.error(`wrote ${path.relative(process.cwd(), file)}`); - await fs.promises.writeFile(file, transformed, 'utf8'); - } - } - - const newExports = { - '.': { - require: { - types: './index.d.ts', - default: './index.js', - }, - types: './index.d.mts', - default: './index.mjs', - }, - }; - - for (const entry of await fs.promises.readdir(distDir, { withFileTypes: true })) { - if (entry.isDirectory() && entry.name !== 'src' && entry.name !== 'internal' && entry.name !== 'bin') { - const subpath = './' + entry.name; - newExports[subpath + '/*.mjs'] = { - default: subpath + '/*.mjs', - }; - newExports[subpath + '/*.js'] = { - default: subpath + '/*.js', - }; - newExports[subpath + '/*'] = { - import: subpath + '/*.mjs', - require: subpath + '/*.js', - }; - } else if (entry.isFile() && /\.[cm]?js$/.test(entry.name)) { - const { name, ext } = path.parse(entry.name); - const subpathWithoutExt = './' + name; - const subpath = './' + entry.name; - newExports[subpathWithoutExt] ||= { import: undefined, require: undefined }; - const isModule = ext[1] === 'm'; - if (isModule) { - newExports[subpathWithoutExt].import = subpath; - } else { - newExports[subpathWithoutExt].require = subpath; - } - newExports[subpath] = { - default: subpath, - }; - } - } - await fs.promises.writeFile( - 'dist/package.json', - JSON.stringify( - Object.assign( - /** @type {Record} */ ( - JSON.parse(await fs.promises.readFile('dist/package.json', 'utf-8')) - ), - { - exports: newExports, - }, - ), - null, - 2, - ), - ); -} 
-postprocess(); diff --git a/scripts/utils/ruffen-docs.py b/scripts/utils/ruffen-docs.py new file mode 100644 index 0000000..0cf2bd2 --- /dev/null +++ b/scripts/utils/ruffen-docs.py @@ -0,0 +1,167 @@ +# fork of https://github.com/asottile/blacken-docs adapted for ruff +from __future__ import annotations + +import re +import sys +import argparse +import textwrap +import contextlib +import subprocess +from typing import Match, Optional, Sequence, Generator, NamedTuple, cast + +MD_RE = re.compile( + r"(?P^(?P *)```\s*python\n)" r"(?P.*?)" r"(?P^(?P=indent)```\s*$)", + re.DOTALL | re.MULTILINE, +) +MD_PYCON_RE = re.compile( + r"(?P^(?P *)```\s*pycon\n)" r"(?P.*?)" r"(?P^(?P=indent)```.*$)", + re.DOTALL | re.MULTILINE, +) +PYCON_PREFIX = ">>> " +PYCON_CONTINUATION_PREFIX = "..." +PYCON_CONTINUATION_RE = re.compile( + rf"^{re.escape(PYCON_CONTINUATION_PREFIX)}( |$)", +) +DEFAULT_LINE_LENGTH = 100 + + +class CodeBlockError(NamedTuple): + offset: int + exc: Exception + + +def format_str( + src: str, +) -> tuple[str, Sequence[CodeBlockError]]: + errors: list[CodeBlockError] = [] + + @contextlib.contextmanager + def _collect_error(match: Match[str]) -> Generator[None, None, None]: + try: + yield + except Exception as e: + errors.append(CodeBlockError(match.start(), e)) + + def _md_match(match: Match[str]) -> str: + code = textwrap.dedent(match["code"]) + with _collect_error(match): + code = format_code_block(code) + code = textwrap.indent(code, match["indent"]) + return f"{match['before']}{code}{match['after']}" + + def _pycon_match(match: Match[str]) -> str: + code = "" + fragment = cast(Optional[str], None) + + def finish_fragment() -> None: + nonlocal code + nonlocal fragment + + if fragment is not None: + with _collect_error(match): + fragment = format_code_block(fragment) + fragment_lines = fragment.splitlines() + code += f"{PYCON_PREFIX}{fragment_lines[0]}\n" + for line in fragment_lines[1:]: + # Skip blank lines to handle Black adding a blank above + # functions within blocks. A blank line would end the REPL + # continuation prompt. + # + # >>> if True: + # ... def f(): + # ... pass + # ... 
+ if line: + code += f"{PYCON_CONTINUATION_PREFIX} {line}\n" + if fragment_lines[-1].startswith(" "): + code += f"{PYCON_CONTINUATION_PREFIX}\n" + fragment = None + + indentation = None + for line in match["code"].splitlines(): + orig_line, line = line, line.lstrip() + if indentation is None and line: + indentation = len(orig_line) - len(line) + continuation_match = PYCON_CONTINUATION_RE.match(line) + if continuation_match and fragment is not None: + fragment += line[continuation_match.end() :] + "\n" + else: + finish_fragment() + if line.startswith(PYCON_PREFIX): + fragment = line[len(PYCON_PREFIX) :] + "\n" + else: + code += orig_line[indentation:] + "\n" + finish_fragment() + return code + + def _md_pycon_match(match: Match[str]) -> str: + code = _pycon_match(match) + code = textwrap.indent(code, match["indent"]) + return f"{match['before']}{code}{match['after']}" + + src = MD_RE.sub(_md_match, src) + src = MD_PYCON_RE.sub(_md_pycon_match, src) + return src, errors + + +def format_code_block(code: str) -> str: + return subprocess.check_output( + [ + sys.executable, + "-m", + "ruff", + "format", + "--stdin-filename=script.py", + f"--line-length={DEFAULT_LINE_LENGTH}", + ], + encoding="utf-8", + input=code, + ) + + +def format_file( + filename: str, + skip_errors: bool, +) -> int: + with open(filename, encoding="UTF-8") as f: + contents = f.read() + new_contents, errors = format_str(contents) + for error in errors: + lineno = contents[: error.offset].count("\n") + 1 + print(f"{filename}:{lineno}: code block parse error {error.exc}") + if errors and not skip_errors: + return 1 + if contents != new_contents: + print(f"{filename}: Rewriting...") + with open(filename, "w", encoding="UTF-8") as f: + f.write(new_contents) + return 0 + else: + return 0 + + +def main(argv: Sequence[str] | None = None) -> int: + parser = argparse.ArgumentParser() + parser.add_argument( + "-l", + "--line-length", + type=int, + default=DEFAULT_LINE_LENGTH, + ) + parser.add_argument( + "-S", + "--skip-string-normalization", + action="store_true", + ) + parser.add_argument("-E", "--skip-errors", action="store_true") + parser.add_argument("filenames", nargs="*") + args = parser.parse_args(argv) + + retv = 0 + for filename in args.filenames: + retv |= format_file(filename, skip_errors=args.skip_errors) + return retv + + +if __name__ == "__main__": + raise SystemExit(main()) diff --git a/scripts/utils/upload-artifact.sh b/scripts/utils/upload-artifact.sh index f130283..78ccd91 100755 --- a/scripts/utils/upload-artifact.sh +++ b/scripts/utils/upload-artifact.sh @@ -1,7 +1,9 @@ #!/usr/bin/env bash set -exuo pipefail -RESPONSE=$(curl -X POST "$URL" \ +FILENAME=$(basename dist/*.whl) + +RESPONSE=$(curl -X POST "$URL?filename=$FILENAME" \ -H "Authorization: Bearer $AUTH" \ -H "Content-Type: application/json") @@ -12,15 +14,13 @@ if [[ "$SIGNED_URL" == "null" ]]; then exit 1 fi -TARBALL=$(cd dist && npm pack --silent) - UPLOAD_RESPONSE=$(curl -v -X PUT \ - -H "Content-Type: application/gzip" \ - --data-binary "@dist/$TARBALL" "$SIGNED_URL" 2>&1) + -H "Content-Type: binary/octet-stream" \ + --data-binary "@dist/$FILENAME" "$SIGNED_URL" 2>&1) if echo "$UPLOAD_RESPONSE" | grep -q "HTTP/[0-9.]* 200"; then echo -e "\033[32mUploaded build to Stainless storage.\033[0m" - echo -e "\033[32mInstallation: npm install 'https://pkg.stainless.com/s/brapi-typescript/$SHA'\033[0m" + echo -e "\033[32mInstallation: pip install 'https://pkg.stainless.com/s/brapi-python/$SHA/$FILENAME'\033[0m" else echo -e "\033[31mFailed to upload 
artifact.\033[0m" exit 1 diff --git a/src/api-promise.ts b/src/api-promise.ts deleted file mode 100644 index 8c775ee..0000000 --- a/src/api-promise.ts +++ /dev/null @@ -1,2 +0,0 @@ -/** @deprecated Import from ./core/api-promise instead */ -export * from './core/api-promise'; diff --git a/src/brapi/__init__.py b/src/brapi/__init__.py new file mode 100644 index 0000000..d940e9f --- /dev/null +++ b/src/brapi/__init__.py @@ -0,0 +1,104 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +import typing as _t + +from . import types +from ._types import NOT_GIVEN, Omit, NoneType, NotGiven, Transport, ProxiesTypes, omit, not_given +from ._utils import file_from_path +from ._client import ( + ENVIRONMENTS, + Brapi, + Client, + Stream, + Timeout, + Transport, + AsyncBrapi, + AsyncClient, + AsyncStream, + RequestOptions, +) +from ._models import BaseModel +from ._version import __title__, __version__ +from ._response import APIResponse as APIResponse, AsyncAPIResponse as AsyncAPIResponse +from ._constants import DEFAULT_TIMEOUT, DEFAULT_MAX_RETRIES, DEFAULT_CONNECTION_LIMITS +from ._exceptions import ( + APIError, + BrapiError, + ConflictError, + NotFoundError, + APIStatusError, + RateLimitError, + APITimeoutError, + BadRequestError, + APIConnectionError, + AuthenticationError, + InternalServerError, + PermissionDeniedError, + UnprocessableEntityError, + APIResponseValidationError, +) +from ._base_client import DefaultHttpxClient, DefaultAioHttpClient, DefaultAsyncHttpxClient +from ._utils._logs import setup_logging as _setup_logging + +__all__ = [ + "types", + "__version__", + "__title__", + "NoneType", + "Transport", + "ProxiesTypes", + "NotGiven", + "NOT_GIVEN", + "not_given", + "Omit", + "omit", + "BrapiError", + "APIError", + "APIStatusError", + "APITimeoutError", + "APIConnectionError", + "APIResponseValidationError", + "BadRequestError", + "AuthenticationError", + "PermissionDeniedError", + "NotFoundError", + "ConflictError", + "UnprocessableEntityError", + "RateLimitError", + "InternalServerError", + "Timeout", + "RequestOptions", + "Client", + "AsyncClient", + "Stream", + "AsyncStream", + "Brapi", + "AsyncBrapi", + "ENVIRONMENTS", + "file_from_path", + "BaseModel", + "DEFAULT_TIMEOUT", + "DEFAULT_MAX_RETRIES", + "DEFAULT_CONNECTION_LIMITS", + "DefaultHttpxClient", + "DefaultAsyncHttpxClient", + "DefaultAioHttpClient", +] + +if not _t.TYPE_CHECKING: + from ._utils._resources_proxy import resources as resources + +_setup_logging() + +# Update the __module__ attribute for exported symbols so that +# error messages point to this module instead of the module +# it was originally defined in, e.g. +# brapi._exceptions.NotFoundError -> brapi.NotFoundError +__locals = locals() +for __name in __all__: + if not __name.startswith("__"): + try: + __locals[__name].__module__ = "brapi" + except (TypeError, AttributeError): + # Some of our exported symbols are builtins which we can't set attributes for. 
+ pass diff --git a/src/brapi/_base_client.py b/src/brapi/_base_client.py new file mode 100644 index 0000000..2c643d3 --- /dev/null +++ b/src/brapi/_base_client.py @@ -0,0 +1,1995 @@ +from __future__ import annotations + +import sys +import json +import time +import uuid +import email +import asyncio +import inspect +import logging +import platform +import email.utils +from types import TracebackType +from random import random +from typing import ( + TYPE_CHECKING, + Any, + Dict, + Type, + Union, + Generic, + Mapping, + TypeVar, + Iterable, + Iterator, + Optional, + Generator, + AsyncIterator, + cast, + overload, +) +from typing_extensions import Literal, override, get_origin + +import anyio +import httpx +import distro +import pydantic +from httpx import URL +from pydantic import PrivateAttr + +from . import _exceptions +from ._qs import Querystring +from ._files import to_httpx_files, async_to_httpx_files +from ._types import ( + Body, + Omit, + Query, + Headers, + Timeout, + NotGiven, + ResponseT, + AnyMapping, + PostParser, + RequestFiles, + HttpxSendArgs, + RequestOptions, + HttpxRequestFiles, + ModelBuilderProtocol, + not_given, +) +from ._utils import is_dict, is_list, asyncify, is_given, lru_cache, is_mapping +from ._compat import PYDANTIC_V1, model_copy, model_dump +from ._models import GenericModel, FinalRequestOptions, validate_type, construct_type +from ._response import ( + APIResponse, + BaseAPIResponse, + AsyncAPIResponse, + extract_response_type, +) +from ._constants import ( + DEFAULT_TIMEOUT, + MAX_RETRY_DELAY, + DEFAULT_MAX_RETRIES, + INITIAL_RETRY_DELAY, + RAW_RESPONSE_HEADER, + OVERRIDE_CAST_TO_HEADER, + DEFAULT_CONNECTION_LIMITS, +) +from ._streaming import Stream, SSEDecoder, AsyncStream, SSEBytesDecoder +from ._exceptions import ( + APIStatusError, + APITimeoutError, + APIConnectionError, + APIResponseValidationError, +) + +log: logging.Logger = logging.getLogger(__name__) + +# TODO: make base page type vars covariant +SyncPageT = TypeVar("SyncPageT", bound="BaseSyncPage[Any]") +AsyncPageT = TypeVar("AsyncPageT", bound="BaseAsyncPage[Any]") + + +_T = TypeVar("_T") +_T_co = TypeVar("_T_co", covariant=True) + +_StreamT = TypeVar("_StreamT", bound=Stream[Any]) +_AsyncStreamT = TypeVar("_AsyncStreamT", bound=AsyncStream[Any]) + +if TYPE_CHECKING: + from httpx._config import ( + DEFAULT_TIMEOUT_CONFIG, # pyright: ignore[reportPrivateImportUsage] + ) + + HTTPX_DEFAULT_TIMEOUT = DEFAULT_TIMEOUT_CONFIG +else: + try: + from httpx._config import DEFAULT_TIMEOUT_CONFIG as HTTPX_DEFAULT_TIMEOUT + except ImportError: + # taken from https://github.com/encode/httpx/blob/3ba5fe0d7ac70222590e759c31442b1cab263791/httpx/_config.py#L366 + HTTPX_DEFAULT_TIMEOUT = Timeout(5.0) + + +class PageInfo: + """Stores the necessary information to build the request to retrieve the next page. + + Either `url` or `params` must be set. + """ + + url: URL | NotGiven + params: Query | NotGiven + json: Body | NotGiven + + @overload + def __init__( + self, + *, + url: URL, + ) -> None: ... + + @overload + def __init__( + self, + *, + params: Query, + ) -> None: ... + + @overload + def __init__( + self, + *, + json: Body, + ) -> None: ... 
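+    # Illustrative sketch only (not part of the generated client): a concrete
+    # page class would typically build one of these in `next_page_info()`, e.g.
+    #
+    #     def next_page_info(self) -> Optional[PageInfo]:
+    #         if self.next_cursor:
+    #             return PageInfo(params={"cursor": self.next_cursor})
+    #         return None
+    #
+    # where `next_cursor` is an assumed field name used here for illustration.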
+ + def __init__( + self, + *, + url: URL | NotGiven = not_given, + json: Body | NotGiven = not_given, + params: Query | NotGiven = not_given, + ) -> None: + self.url = url + self.json = json + self.params = params + + @override + def __repr__(self) -> str: + if self.url: + return f"{self.__class__.__name__}(url={self.url})" + if self.json: + return f"{self.__class__.__name__}(json={self.json})" + return f"{self.__class__.__name__}(params={self.params})" + + +class BasePage(GenericModel, Generic[_T]): + """ + Defines the core interface for pagination. + + Type Args: + ModelT: The pydantic model that represents an item in the response. + + Methods: + has_next_page(): Check if there is another page available + next_page_info(): Get the necessary information to make a request for the next page + """ + + _options: FinalRequestOptions = PrivateAttr() + _model: Type[_T] = PrivateAttr() + + def has_next_page(self) -> bool: + items = self._get_page_items() + if not items: + return False + return self.next_page_info() is not None + + def next_page_info(self) -> Optional[PageInfo]: ... + + def _get_page_items(self) -> Iterable[_T]: # type: ignore[empty-body] + ... + + def _params_from_url(self, url: URL) -> httpx.QueryParams: + # TODO: do we have to preprocess params here? + return httpx.QueryParams(cast(Any, self._options.params)).merge(url.params) + + def _info_to_options(self, info: PageInfo) -> FinalRequestOptions: + options = model_copy(self._options) + options._strip_raw_response_header() + + if not isinstance(info.params, NotGiven): + options.params = {**options.params, **info.params} + return options + + if not isinstance(info.url, NotGiven): + params = self._params_from_url(info.url) + url = info.url.copy_with(params=params) + options.params = dict(url.params) + options.url = str(url) + return options + + if not isinstance(info.json, NotGiven): + if not is_mapping(info.json): + raise TypeError("Pagination is only supported with mappings") + + if not options.json_data: + options.json_data = {**info.json} + else: + if not is_mapping(options.json_data): + raise TypeError("Pagination is only supported with mappings") + + options.json_data = {**options.json_data, **info.json} + return options + + raise ValueError("Unexpected PageInfo state") + + +class BaseSyncPage(BasePage[_T], Generic[_T]): + _client: SyncAPIClient = pydantic.PrivateAttr() + + def _set_private_attributes( + self, + client: SyncAPIClient, + model: Type[_T], + options: FinalRequestOptions, + ) -> None: + if (not PYDANTIC_V1) and getattr(self, "__pydantic_private__", None) is None: + self.__pydantic_private__ = {} + + self._model = model + self._client = client + self._options = options + + # Pydantic uses a custom `__iter__` method to support casting BaseModels + # to dictionaries. e.g. dict(model). + # As we want to support `for item in page`, this is inherently incompatible + # with the default pydantic behaviour. It is not possible to support both + # use cases at once. Fortunately, this is not a big deal as all other pydantic + # methods should continue to work as expected as there is an alternative method + # to cast a model to a dictionary, model.dict(), which is used internally + # by pydantic. 
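+    # Rough usage sketch (the resource name below is a placeholder, not part of
+    # this SDK):
+    #
+    #     page = client.some_resource.list()
+    #     for item in page:  # this __iter__ transparently fetches further pages
+    #         print(item)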
+ def __iter__(self) -> Iterator[_T]: # type: ignore + for page in self.iter_pages(): + for item in page._get_page_items(): + yield item + + def iter_pages(self: SyncPageT) -> Iterator[SyncPageT]: + page = self + while True: + yield page + if page.has_next_page(): + page = page.get_next_page() + else: + return + + def get_next_page(self: SyncPageT) -> SyncPageT: + info = self.next_page_info() + if not info: + raise RuntimeError( + "No next page expected; please check `.has_next_page()` before calling `.get_next_page()`." + ) + + options = self._info_to_options(info) + return self._client._request_api_list(self._model, page=self.__class__, options=options) + + +class AsyncPaginator(Generic[_T, AsyncPageT]): + def __init__( + self, + client: AsyncAPIClient, + options: FinalRequestOptions, + page_cls: Type[AsyncPageT], + model: Type[_T], + ) -> None: + self._model = model + self._client = client + self._options = options + self._page_cls = page_cls + + def __await__(self) -> Generator[Any, None, AsyncPageT]: + return self._get_page().__await__() + + async def _get_page(self) -> AsyncPageT: + def _parser(resp: AsyncPageT) -> AsyncPageT: + resp._set_private_attributes( + model=self._model, + options=self._options, + client=self._client, + ) + return resp + + self._options.post_parser = _parser + + return await self._client.request(self._page_cls, self._options) + + async def __aiter__(self) -> AsyncIterator[_T]: + # https://github.com/microsoft/pyright/issues/3464 + page = cast( + AsyncPageT, + await self, # type: ignore + ) + async for item in page: + yield item + + +class BaseAsyncPage(BasePage[_T], Generic[_T]): + _client: AsyncAPIClient = pydantic.PrivateAttr() + + def _set_private_attributes( + self, + model: Type[_T], + client: AsyncAPIClient, + options: FinalRequestOptions, + ) -> None: + if (not PYDANTIC_V1) and getattr(self, "__pydantic_private__", None) is None: + self.__pydantic_private__ = {} + + self._model = model + self._client = client + self._options = options + + async def __aiter__(self) -> AsyncIterator[_T]: + async for page in self.iter_pages(): + for item in page._get_page_items(): + yield item + + async def iter_pages(self: AsyncPageT) -> AsyncIterator[AsyncPageT]: + page = self + while True: + yield page + if page.has_next_page(): + page = await page.get_next_page() + else: + return + + async def get_next_page(self: AsyncPageT) -> AsyncPageT: + info = self.next_page_info() + if not info: + raise RuntimeError( + "No next page expected; please check `.has_next_page()` before calling `.get_next_page()`." 
+ ) + + options = self._info_to_options(info) + return await self._client._request_api_list(self._model, page=self.__class__, options=options) + + +_HttpxClientT = TypeVar("_HttpxClientT", bound=Union[httpx.Client, httpx.AsyncClient]) +_DefaultStreamT = TypeVar("_DefaultStreamT", bound=Union[Stream[Any], AsyncStream[Any]]) + + +class BaseClient(Generic[_HttpxClientT, _DefaultStreamT]): + _client: _HttpxClientT + _version: str + _base_url: URL + max_retries: int + timeout: Union[float, Timeout, None] + _strict_response_validation: bool + _idempotency_header: str | None + _default_stream_cls: type[_DefaultStreamT] | None = None + + def __init__( + self, + *, + version: str, + base_url: str | URL, + _strict_response_validation: bool, + max_retries: int = DEFAULT_MAX_RETRIES, + timeout: float | Timeout | None = DEFAULT_TIMEOUT, + custom_headers: Mapping[str, str] | None = None, + custom_query: Mapping[str, object] | None = None, + ) -> None: + self._version = version + self._base_url = self._enforce_trailing_slash(URL(base_url)) + self.max_retries = max_retries + self.timeout = timeout + self._custom_headers = custom_headers or {} + self._custom_query = custom_query or {} + self._strict_response_validation = _strict_response_validation + self._idempotency_header = None + self._platform: Platform | None = None + + if max_retries is None: # pyright: ignore[reportUnnecessaryComparison] + raise TypeError( + "max_retries cannot be None. If you want to disable retries, pass `0`; if you want unlimited retries, pass `math.inf` or a very high number; if you want the default behavior, pass `brapi.DEFAULT_MAX_RETRIES`" + ) + + def _enforce_trailing_slash(self, url: URL) -> URL: + if url.raw_path.endswith(b"/"): + return url + return url.copy_with(raw_path=url.raw_path + b"/") + + def _make_status_error_from_response( + self, + response: httpx.Response, + ) -> APIStatusError: + if response.is_closed and not response.is_stream_consumed: + # We can't read the response body as it has been closed + # before it was read. This can happen if an event hook + # raises a status error. + body = None + err_msg = f"Error code: {response.status_code}" + else: + err_text = response.text.strip() + body = err_text + + try: + body = json.loads(err_text) + err_msg = f"Error code: {response.status_code} - {body}" + except Exception: + err_msg = err_text or f"Error code: {response.status_code}" + + return self._make_status_error(err_msg, body=body, response=response) + + def _make_status_error( + self, + err_msg: str, + *, + body: object, + response: httpx.Response, + ) -> _exceptions.APIStatusError: + raise NotImplementedError() + + def _build_headers(self, options: FinalRequestOptions, *, retries_taken: int = 0) -> httpx.Headers: + custom_headers = options.headers or {} + headers_dict = _merge_mappings(self.default_headers, custom_headers) + self._validate_headers(headers_dict, custom_headers) + + # headers are case-insensitive while dictionaries are not. + headers = httpx.Headers(headers_dict) + + idempotency_header = self._idempotency_header + if idempotency_header and options.idempotency_key and idempotency_header not in headers: + headers[idempotency_header] = options.idempotency_key + + # Don't set these headers if they were already set or removed by the caller. We check + # `custom_headers`, which can contain `Omit()`, instead of `headers` to account for the removal case. 
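+        # For example (illustrative), if `custom_headers` contains
+        # {"x-stainless-retry-count": Omit()}, the default below is skipped and
+        # no retry-count header is sent at all.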
+ lower_custom_headers = [header.lower() for header in custom_headers] + if "x-stainless-retry-count" not in lower_custom_headers: + headers["x-stainless-retry-count"] = str(retries_taken) + if "x-stainless-read-timeout" not in lower_custom_headers: + timeout = self.timeout if isinstance(options.timeout, NotGiven) else options.timeout + if isinstance(timeout, Timeout): + timeout = timeout.read + if timeout is not None: + headers["x-stainless-read-timeout"] = str(timeout) + + return headers + + def _prepare_url(self, url: str) -> URL: + """ + Merge a URL argument together with any 'base_url' on the client, + to create the URL used for the outgoing request. + """ + # Copied from httpx's `_merge_url` method. + merge_url = URL(url) + if merge_url.is_relative_url: + merge_raw_path = self.base_url.raw_path + merge_url.raw_path.lstrip(b"/") + return self.base_url.copy_with(raw_path=merge_raw_path) + + return merge_url + + def _make_sse_decoder(self) -> SSEDecoder | SSEBytesDecoder: + return SSEDecoder() + + def _build_request( + self, + options: FinalRequestOptions, + *, + retries_taken: int = 0, + ) -> httpx.Request: + if log.isEnabledFor(logging.DEBUG): + log.debug("Request options: %s", model_dump(options, exclude_unset=True)) + + kwargs: dict[str, Any] = {} + + json_data = options.json_data + if options.extra_json is not None: + if json_data is None: + json_data = cast(Body, options.extra_json) + elif is_mapping(json_data): + json_data = _merge_mappings(json_data, options.extra_json) + else: + raise RuntimeError(f"Unexpected JSON data type, {type(json_data)}, cannot merge with `extra_body`") + + headers = self._build_headers(options, retries_taken=retries_taken) + params = _merge_mappings(self.default_query, options.params) + content_type = headers.get("Content-Type") + files = options.files + + # If the given Content-Type header is multipart/form-data then it + # has to be removed so that httpx can generate the header with + # additional information for us as it has to be in this form + # for the server to be able to correctly parse the request: + # multipart/form-data; boundary=---abc-- + if content_type is not None and content_type.startswith("multipart/form-data"): + if "boundary" not in content_type: + # only remove the header if the boundary hasn't been explicitly set + # as the caller doesn't want httpx to come up with their own boundary + headers.pop("Content-Type") + + # As we are now sending multipart/form-data instead of application/json + # we need to tell httpx to use it, https://www.python-httpx.org/advanced/clients/#multipart-file-encoding + if json_data: + if not is_dict(json_data): + raise TypeError( + f"Expected query input to be a dictionary for multipart requests but got {type(json_data)} instead." + ) + kwargs["data"] = self._serialize_multipartform(json_data) + + # httpx determines whether or not to send a "multipart/form-data" + # request based on the truthiness of the "files" argument. + # This gets around that issue by generating a dict value that + # evaluates to true. 
+ # + # https://github.com/encode/httpx/discussions/2399#discussioncomment-3814186 + if not files: + files = cast(HttpxRequestFiles, ForceMultipartDict()) + + prepared_url = self._prepare_url(options.url) + if "_" in prepared_url.host: + # work around https://github.com/encode/httpx/discussions/2880 + kwargs["extensions"] = {"sni_hostname": prepared_url.host.replace("_", "-")} + + is_body_allowed = options.method.lower() != "get" + + if is_body_allowed: + if isinstance(json_data, bytes): + kwargs["content"] = json_data + else: + kwargs["json"] = json_data if is_given(json_data) else None + kwargs["files"] = files + else: + headers.pop("Content-Type", None) + kwargs.pop("data", None) + + # TODO: report this error to httpx + return self._client.build_request( # pyright: ignore[reportUnknownMemberType] + headers=headers, + timeout=self.timeout if isinstance(options.timeout, NotGiven) else options.timeout, + method=options.method, + url=prepared_url, + # the `Query` type that we use is incompatible with qs' + # `Params` type as it needs to be typed as `Mapping[str, object]` + # so that passing a `TypedDict` doesn't cause an error. + # https://github.com/microsoft/pyright/issues/3526#event-6715453066 + params=self.qs.stringify(cast(Mapping[str, Any], params)) if params else None, + **kwargs, + ) + + def _serialize_multipartform(self, data: Mapping[object, object]) -> dict[str, object]: + items = self.qs.stringify_items( + # TODO: type ignore is required as stringify_items is well typed but we can't be + # well typed without heavy validation. + data, # type: ignore + array_format="brackets", + ) + serialized: dict[str, object] = {} + for key, value in items: + existing = serialized.get(key) + + if not existing: + serialized[key] = value + continue + + # If a value has already been set for this key then that + # means we're sending data like `array[]=[1, 2, 3]` and we + # need to tell httpx that we want to send multiple values with + # the same key which is done by using a list or a tuple. + # + # Note: 2d arrays should never result in the same key at both + # levels so it's safe to assume that if the value is a list, + # it was because we changed it to be a list. 
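+            # Rough sketch of the behaviour described above (illustrative):
+            #     {"tags": ["a", "b"]}  ->  [("tags[]", "a"), ("tags[]", "b")]
+            # which this branch collapses into {"tags[]": ["a", "b"]} so that
+            # httpx sends both values under the same form key.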
+ if is_list(existing): + existing.append(value) + else: + serialized[key] = [existing, value] + + return serialized + + def _maybe_override_cast_to(self, cast_to: type[ResponseT], options: FinalRequestOptions) -> type[ResponseT]: + if not is_given(options.headers): + return cast_to + + # make a copy of the headers so we don't mutate user-input + headers = dict(options.headers) + + # we internally support defining a temporary header to override the + # default `cast_to` type for use with `.with_raw_response` and `.with_streaming_response` + # see _response.py for implementation details + override_cast_to = headers.pop(OVERRIDE_CAST_TO_HEADER, not_given) + if is_given(override_cast_to): + options.headers = headers + return cast(Type[ResponseT], override_cast_to) + + return cast_to + + def _should_stream_response_body(self, request: httpx.Request) -> bool: + return request.headers.get(RAW_RESPONSE_HEADER) == "stream" # type: ignore[no-any-return] + + def _process_response_data( + self, + *, + data: object, + cast_to: type[ResponseT], + response: httpx.Response, + ) -> ResponseT: + if data is None: + return cast(ResponseT, None) + + if cast_to is object: + return cast(ResponseT, data) + + try: + if inspect.isclass(cast_to) and issubclass(cast_to, ModelBuilderProtocol): + return cast(ResponseT, cast_to.build(response=response, data=data)) + + if self._strict_response_validation: + return cast(ResponseT, validate_type(type_=cast_to, value=data)) + + return cast(ResponseT, construct_type(type_=cast_to, value=data)) + except pydantic.ValidationError as err: + raise APIResponseValidationError(response=response, body=data) from err + + @property + def qs(self) -> Querystring: + return Querystring() + + @property + def custom_auth(self) -> httpx.Auth | None: + return None + + @property + def auth_headers(self) -> dict[str, str]: + return {} + + @property + def default_headers(self) -> dict[str, str | Omit]: + return { + "Accept": "application/json", + "Content-Type": "application/json", + "User-Agent": self.user_agent, + **self.platform_headers(), + **self.auth_headers, + **self._custom_headers, + } + + @property + def default_query(self) -> dict[str, object]: + return { + **self._custom_query, + } + + def _validate_headers( + self, + headers: Headers, # noqa: ARG002 + custom_headers: Headers, # noqa: ARG002 + ) -> None: + """Validate the given default headers and custom headers. + + Does nothing by default. + """ + return + + @property + def user_agent(self) -> str: + return f"{self.__class__.__name__}/Python {self._version}" + + @property + def base_url(self) -> URL: + return self._base_url + + @base_url.setter + def base_url(self, url: URL | str) -> None: + self._base_url = self._enforce_trailing_slash(url if isinstance(url, URL) else URL(url)) + + def platform_headers(self) -> Dict[str, str]: + # the actual implementation is in a separate `lru_cache` decorated + # function because adding `lru_cache` to methods will leak memory + # https://github.com/python/cpython/issues/88476 + return platform_headers(self._version, platform=self._platform) + + def _parse_retry_after_header(self, response_headers: Optional[httpx.Headers] = None) -> float | None: + """Returns a float of the number of seconds (not milliseconds) to wait after retrying, or None if unspecified. 
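+
+        For example (illustrative values only):
+            retry-after-ms: 250          -> 0.25
+            Retry-After: 5               -> 5.0
+            Retry-After: <an HTTP-date>  -> seconds remaining until that date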
+ + About the Retry-After header: https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Retry-After + See also https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Retry-After#syntax + """ + if response_headers is None: + return None + + # First, try the non-standard `retry-after-ms` header for milliseconds, + # which is more precise than integer-seconds `retry-after` + try: + retry_ms_header = response_headers.get("retry-after-ms", None) + return float(retry_ms_header) / 1000 + except (TypeError, ValueError): + pass + + # Next, try parsing `retry-after` header as seconds (allowing nonstandard floats). + retry_header = response_headers.get("retry-after") + try: + # note: the spec indicates that this should only ever be an integer + # but if someone sends a float there's no reason for us to not respect it + return float(retry_header) + except (TypeError, ValueError): + pass + + # Last, try parsing `retry-after` as a date. + retry_date_tuple = email.utils.parsedate_tz(retry_header) + if retry_date_tuple is None: + return None + + retry_date = email.utils.mktime_tz(retry_date_tuple) + return float(retry_date - time.time()) + + def _calculate_retry_timeout( + self, + remaining_retries: int, + options: FinalRequestOptions, + response_headers: Optional[httpx.Headers] = None, + ) -> float: + max_retries = options.get_max_retries(self.max_retries) + + # If the API asks us to wait a certain amount of time (and it's a reasonable amount), just do what it says. + retry_after = self._parse_retry_after_header(response_headers) + if retry_after is not None and 0 < retry_after <= 60: + return retry_after + + # Also cap retry count to 1000 to avoid any potential overflows with `pow` + nb_retries = min(max_retries - remaining_retries, 1000) + + # Apply exponential backoff, but not more than the max. + sleep_seconds = min(INITIAL_RETRY_DELAY * pow(2.0, nb_retries), MAX_RETRY_DELAY) + + # Apply some jitter, plus-or-minus half a second. + jitter = 1 - 0.25 * random() + timeout = sleep_seconds * jitter + return timeout if timeout >= 0 else 0 + + def _should_retry(self, response: httpx.Response) -> bool: + # Note: this is not a standard header + should_retry_header = response.headers.get("x-should-retry") + + # If the server explicitly says whether or not to retry, obey. + if should_retry_header == "true": + log.debug("Retrying as header `x-should-retry` is set to `true`") + return True + if should_retry_header == "false": + log.debug("Not retrying as header `x-should-retry` is set to `false`") + return False + + # Retry on request timeouts. + if response.status_code == 408: + log.debug("Retrying due to status code %i", response.status_code) + return True + + # Retry on lock timeouts. + if response.status_code == 409: + log.debug("Retrying due to status code %i", response.status_code) + return True + + # Retry on rate limits. + if response.status_code == 429: + log.debug("Retrying due to status code %i", response.status_code) + return True + + # Retry internal errors. 
+ if response.status_code >= 500: + log.debug("Retrying due to status code %i", response.status_code) + return True + + log.debug("Not retrying") + return False + + def _idempotency_key(self) -> str: + return f"stainless-python-retry-{uuid.uuid4()}" + + +class _DefaultHttpxClient(httpx.Client): + def __init__(self, **kwargs: Any) -> None: + kwargs.setdefault("timeout", DEFAULT_TIMEOUT) + kwargs.setdefault("limits", DEFAULT_CONNECTION_LIMITS) + kwargs.setdefault("follow_redirects", True) + super().__init__(**kwargs) + + +if TYPE_CHECKING: + DefaultHttpxClient = httpx.Client + """An alias to `httpx.Client` that provides the same defaults that this SDK + uses internally. + + This is useful because overriding the `http_client` with your own instance of + `httpx.Client` will result in httpx's defaults being used, not ours. + """ +else: + DefaultHttpxClient = _DefaultHttpxClient + + +class SyncHttpxClientWrapper(DefaultHttpxClient): + def __del__(self) -> None: + if self.is_closed: + return + + try: + self.close() + except Exception: + pass + + +class SyncAPIClient(BaseClient[httpx.Client, Stream[Any]]): + _client: httpx.Client + _default_stream_cls: type[Stream[Any]] | None = None + + def __init__( + self, + *, + version: str, + base_url: str | URL, + max_retries: int = DEFAULT_MAX_RETRIES, + timeout: float | Timeout | None | NotGiven = not_given, + http_client: httpx.Client | None = None, + custom_headers: Mapping[str, str] | None = None, + custom_query: Mapping[str, object] | None = None, + _strict_response_validation: bool, + ) -> None: + if not is_given(timeout): + # if the user passed in a custom http client with a non-default + # timeout set then we use that timeout. + # + # note: there is an edge case here where the user passes in a client + # where they've explicitly set the timeout to match the default timeout + # as this check is structural, meaning that we'll think they didn't + # pass in a timeout and will ignore it + if http_client and http_client.timeout != HTTPX_DEFAULT_TIMEOUT: + timeout = http_client.timeout + else: + timeout = DEFAULT_TIMEOUT + + if http_client is not None and not isinstance(http_client, httpx.Client): # pyright: ignore[reportUnnecessaryIsInstance] + raise TypeError( + f"Invalid `http_client` argument; Expected an instance of `httpx.Client` but got {type(http_client)}" + ) + + super().__init__( + version=version, + # cast to a valid type because mypy doesn't understand our type narrowing + timeout=cast(Timeout, timeout), + base_url=base_url, + max_retries=max_retries, + custom_query=custom_query, + custom_headers=custom_headers, + _strict_response_validation=_strict_response_validation, + ) + self._client = http_client or SyncHttpxClientWrapper( + base_url=base_url, + # cast to a valid type because mypy doesn't understand our type narrowing + timeout=cast(Timeout, timeout), + ) + + def is_closed(self) -> bool: + return self._client.is_closed + + def close(self) -> None: + """Close the underlying HTTPX client. + + The client will *not* be usable after this. 
+ """ + # If an error is thrown while constructing a client, self._client + # may not be present + if hasattr(self, "_client"): + self._client.close() + + def __enter__(self: _T) -> _T: + return self + + def __exit__( + self, + exc_type: type[BaseException] | None, + exc: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + self.close() + + def _prepare_options( + self, + options: FinalRequestOptions, # noqa: ARG002 + ) -> FinalRequestOptions: + """Hook for mutating the given options""" + return options + + def _prepare_request( + self, + request: httpx.Request, # noqa: ARG002 + ) -> None: + """This method is used as a callback for mutating the `Request` object + after it has been constructed. + This is useful for cases where you want to add certain headers based off of + the request properties, e.g. `url`, `method` etc. + """ + return None + + @overload + def request( + self, + cast_to: Type[ResponseT], + options: FinalRequestOptions, + *, + stream: Literal[True], + stream_cls: Type[_StreamT], + ) -> _StreamT: ... + + @overload + def request( + self, + cast_to: Type[ResponseT], + options: FinalRequestOptions, + *, + stream: Literal[False] = False, + ) -> ResponseT: ... + + @overload + def request( + self, + cast_to: Type[ResponseT], + options: FinalRequestOptions, + *, + stream: bool = False, + stream_cls: Type[_StreamT] | None = None, + ) -> ResponseT | _StreamT: ... + + def request( + self, + cast_to: Type[ResponseT], + options: FinalRequestOptions, + *, + stream: bool = False, + stream_cls: type[_StreamT] | None = None, + ) -> ResponseT | _StreamT: + cast_to = self._maybe_override_cast_to(cast_to, options) + + # create a copy of the options we were given so that if the + # options are mutated later & we then retry, the retries are + # given the original options + input_options = model_copy(options) + if input_options.idempotency_key is None and input_options.method.lower() != "get": + # ensure the idempotency key is reused between requests + input_options.idempotency_key = self._idempotency_key() + + response: httpx.Response | None = None + max_retries = input_options.get_max_retries(self.max_retries) + + retries_taken = 0 + for retries_taken in range(max_retries + 1): + options = model_copy(input_options) + options = self._prepare_options(options) + + remaining_retries = max_retries - retries_taken + request = self._build_request(options, retries_taken=retries_taken) + self._prepare_request(request) + + kwargs: HttpxSendArgs = {} + if self.custom_auth is not None: + kwargs["auth"] = self.custom_auth + + if options.follow_redirects is not None: + kwargs["follow_redirects"] = options.follow_redirects + + log.debug("Sending HTTP Request: %s %s", request.method, request.url) + + response = None + try: + response = self._client.send( + request, + stream=stream or self._should_stream_response_body(request=request), + **kwargs, + ) + except httpx.TimeoutException as err: + log.debug("Encountered httpx.TimeoutException", exc_info=True) + + if remaining_retries > 0: + self._sleep_for_retry( + retries_taken=retries_taken, + max_retries=max_retries, + options=input_options, + response=None, + ) + continue + + log.debug("Raising timeout error") + raise APITimeoutError(request=request) from err + except Exception as err: + log.debug("Encountered Exception", exc_info=True) + + if remaining_retries > 0: + self._sleep_for_retry( + retries_taken=retries_taken, + max_retries=max_retries, + options=input_options, + response=None, + ) + continue + + log.debug("Raising connection error") + 
raise APIConnectionError(request=request) from err + + log.debug( + 'HTTP Response: %s %s "%i %s" %s', + request.method, + request.url, + response.status_code, + response.reason_phrase, + response.headers, + ) + + try: + response.raise_for_status() + except httpx.HTTPStatusError as err: # thrown on 4xx and 5xx status code + log.debug("Encountered httpx.HTTPStatusError", exc_info=True) + + if remaining_retries > 0 and self._should_retry(err.response): + err.response.close() + self._sleep_for_retry( + retries_taken=retries_taken, + max_retries=max_retries, + options=input_options, + response=response, + ) + continue + + # If the response is streamed then we need to explicitly read the response + # to completion before attempting to access the response text. + if not err.response.is_closed: + err.response.read() + + log.debug("Re-raising status error") + raise self._make_status_error_from_response(err.response) from None + + break + + assert response is not None, "could not resolve response (should never happen)" + return self._process_response( + cast_to=cast_to, + options=options, + response=response, + stream=stream, + stream_cls=stream_cls, + retries_taken=retries_taken, + ) + + def _sleep_for_retry( + self, *, retries_taken: int, max_retries: int, options: FinalRequestOptions, response: httpx.Response | None + ) -> None: + remaining_retries = max_retries - retries_taken + if remaining_retries == 1: + log.debug("1 retry left") + else: + log.debug("%i retries left", remaining_retries) + + timeout = self._calculate_retry_timeout(remaining_retries, options, response.headers if response else None) + log.info("Retrying request to %s in %f seconds", options.url, timeout) + + time.sleep(timeout) + + def _process_response( + self, + *, + cast_to: Type[ResponseT], + options: FinalRequestOptions, + response: httpx.Response, + stream: bool, + stream_cls: type[Stream[Any]] | type[AsyncStream[Any]] | None, + retries_taken: int = 0, + ) -> ResponseT: + origin = get_origin(cast_to) or cast_to + + if ( + inspect.isclass(origin) + and issubclass(origin, BaseAPIResponse) + # we only want to actually return the custom BaseAPIResponse class if we're + # returning the raw response, or if we're not streaming SSE, as if we're streaming + # SSE then `cast_to` doesn't actively reflect the type we need to parse into + and (not stream or bool(response.request.headers.get(RAW_RESPONSE_HEADER))) + ): + if not issubclass(origin, APIResponse): + raise TypeError(f"API Response types must subclass {APIResponse}; Received {origin}") + + response_cls = cast("type[BaseAPIResponse[Any]]", cast_to) + return cast( + ResponseT, + response_cls( + raw=response, + client=self, + cast_to=extract_response_type(response_cls), + stream=stream, + stream_cls=stream_cls, + options=options, + retries_taken=retries_taken, + ), + ) + + if cast_to == httpx.Response: + return cast(ResponseT, response) + + api_response = APIResponse( + raw=response, + client=self, + cast_to=cast("type[ResponseT]", cast_to), # pyright: ignore[reportUnnecessaryCast] + stream=stream, + stream_cls=stream_cls, + options=options, + retries_taken=retries_taken, + ) + if bool(response.request.headers.get(RAW_RESPONSE_HEADER)): + return cast(ResponseT, api_response) + + return api_response.parse() + + def _request_api_list( + self, + model: Type[object], + page: Type[SyncPageT], + options: FinalRequestOptions, + ) -> SyncPageT: + def _parser(resp: SyncPageT) -> SyncPageT: + resp._set_private_attributes( + client=self, + model=model, + options=options, + ) + return resp 
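+
+        # Note: `post_parser` runs after the response body has been cast to the
+        # page class, handing the page the client/model/options it needs to
+        # lazily fetch subsequent pages.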
+ + options.post_parser = _parser + + return self.request(page, options, stream=False) + + @overload + def get( + self, + path: str, + *, + cast_to: Type[ResponseT], + options: RequestOptions = {}, + stream: Literal[False] = False, + ) -> ResponseT: ... + + @overload + def get( + self, + path: str, + *, + cast_to: Type[ResponseT], + options: RequestOptions = {}, + stream: Literal[True], + stream_cls: type[_StreamT], + ) -> _StreamT: ... + + @overload + def get( + self, + path: str, + *, + cast_to: Type[ResponseT], + options: RequestOptions = {}, + stream: bool, + stream_cls: type[_StreamT] | None = None, + ) -> ResponseT | _StreamT: ... + + def get( + self, + path: str, + *, + cast_to: Type[ResponseT], + options: RequestOptions = {}, + stream: bool = False, + stream_cls: type[_StreamT] | None = None, + ) -> ResponseT | _StreamT: + opts = FinalRequestOptions.construct(method="get", url=path, **options) + # cast is required because mypy complains about returning Any even though + # it understands the type variables + return cast(ResponseT, self.request(cast_to, opts, stream=stream, stream_cls=stream_cls)) + + @overload + def post( + self, + path: str, + *, + cast_to: Type[ResponseT], + body: Body | None = None, + options: RequestOptions = {}, + files: RequestFiles | None = None, + stream: Literal[False] = False, + ) -> ResponseT: ... + + @overload + def post( + self, + path: str, + *, + cast_to: Type[ResponseT], + body: Body | None = None, + options: RequestOptions = {}, + files: RequestFiles | None = None, + stream: Literal[True], + stream_cls: type[_StreamT], + ) -> _StreamT: ... + + @overload + def post( + self, + path: str, + *, + cast_to: Type[ResponseT], + body: Body | None = None, + options: RequestOptions = {}, + files: RequestFiles | None = None, + stream: bool, + stream_cls: type[_StreamT] | None = None, + ) -> ResponseT | _StreamT: ... 
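+    # Rough illustration only (the path and model below are made-up placeholders):
+    #
+    #     widget = client.post("/widgets", cast_to=Widget, body={"name": "x"})
+    #
+    # Streaming callers pass `stream=True` together with a `stream_cls`.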
+ + def post( + self, + path: str, + *, + cast_to: Type[ResponseT], + body: Body | None = None, + options: RequestOptions = {}, + files: RequestFiles | None = None, + stream: bool = False, + stream_cls: type[_StreamT] | None = None, + ) -> ResponseT | _StreamT: + opts = FinalRequestOptions.construct( + method="post", url=path, json_data=body, files=to_httpx_files(files), **options + ) + return cast(ResponseT, self.request(cast_to, opts, stream=stream, stream_cls=stream_cls)) + + def patch( + self, + path: str, + *, + cast_to: Type[ResponseT], + body: Body | None = None, + options: RequestOptions = {}, + ) -> ResponseT: + opts = FinalRequestOptions.construct(method="patch", url=path, json_data=body, **options) + return self.request(cast_to, opts) + + def put( + self, + path: str, + *, + cast_to: Type[ResponseT], + body: Body | None = None, + files: RequestFiles | None = None, + options: RequestOptions = {}, + ) -> ResponseT: + opts = FinalRequestOptions.construct( + method="put", url=path, json_data=body, files=to_httpx_files(files), **options + ) + return self.request(cast_to, opts) + + def delete( + self, + path: str, + *, + cast_to: Type[ResponseT], + body: Body | None = None, + options: RequestOptions = {}, + ) -> ResponseT: + opts = FinalRequestOptions.construct(method="delete", url=path, json_data=body, **options) + return self.request(cast_to, opts) + + def get_api_list( + self, + path: str, + *, + model: Type[object], + page: Type[SyncPageT], + body: Body | None = None, + options: RequestOptions = {}, + method: str = "get", + ) -> SyncPageT: + opts = FinalRequestOptions.construct(method=method, url=path, json_data=body, **options) + return self._request_api_list(model, page, opts) + + +class _DefaultAsyncHttpxClient(httpx.AsyncClient): + def __init__(self, **kwargs: Any) -> None: + kwargs.setdefault("timeout", DEFAULT_TIMEOUT) + kwargs.setdefault("limits", DEFAULT_CONNECTION_LIMITS) + kwargs.setdefault("follow_redirects", True) + super().__init__(**kwargs) + + +try: + import httpx_aiohttp +except ImportError: + + class _DefaultAioHttpClient(httpx.AsyncClient): + def __init__(self, **_kwargs: Any) -> None: + raise RuntimeError("To use the aiohttp client you must have installed the package with the `aiohttp` extra") +else: + + class _DefaultAioHttpClient(httpx_aiohttp.HttpxAiohttpClient): # type: ignore + def __init__(self, **kwargs: Any) -> None: + kwargs.setdefault("timeout", DEFAULT_TIMEOUT) + kwargs.setdefault("limits", DEFAULT_CONNECTION_LIMITS) + kwargs.setdefault("follow_redirects", True) + + super().__init__(**kwargs) + + +if TYPE_CHECKING: + DefaultAsyncHttpxClient = httpx.AsyncClient + """An alias to `httpx.AsyncClient` that provides the same defaults that this SDK + uses internally. + + This is useful because overriding the `http_client` with your own instance of + `httpx.AsyncClient` will result in httpx's defaults being used, not ours. 
+ """ + + DefaultAioHttpClient = httpx.AsyncClient + """An alias to `httpx.AsyncClient` that changes the default HTTP transport to `aiohttp`.""" +else: + DefaultAsyncHttpxClient = _DefaultAsyncHttpxClient + DefaultAioHttpClient = _DefaultAioHttpClient + + +class AsyncHttpxClientWrapper(DefaultAsyncHttpxClient): + def __del__(self) -> None: + if self.is_closed: + return + + try: + # TODO(someday): support non asyncio runtimes here + asyncio.get_running_loop().create_task(self.aclose()) + except Exception: + pass + + +class AsyncAPIClient(BaseClient[httpx.AsyncClient, AsyncStream[Any]]): + _client: httpx.AsyncClient + _default_stream_cls: type[AsyncStream[Any]] | None = None + + def __init__( + self, + *, + version: str, + base_url: str | URL, + _strict_response_validation: bool, + max_retries: int = DEFAULT_MAX_RETRIES, + timeout: float | Timeout | None | NotGiven = not_given, + http_client: httpx.AsyncClient | None = None, + custom_headers: Mapping[str, str] | None = None, + custom_query: Mapping[str, object] | None = None, + ) -> None: + if not is_given(timeout): + # if the user passed in a custom http client with a non-default + # timeout set then we use that timeout. + # + # note: there is an edge case here where the user passes in a client + # where they've explicitly set the timeout to match the default timeout + # as this check is structural, meaning that we'll think they didn't + # pass in a timeout and will ignore it + if http_client and http_client.timeout != HTTPX_DEFAULT_TIMEOUT: + timeout = http_client.timeout + else: + timeout = DEFAULT_TIMEOUT + + if http_client is not None and not isinstance(http_client, httpx.AsyncClient): # pyright: ignore[reportUnnecessaryIsInstance] + raise TypeError( + f"Invalid `http_client` argument; Expected an instance of `httpx.AsyncClient` but got {type(http_client)}" + ) + + super().__init__( + version=version, + base_url=base_url, + # cast to a valid type because mypy doesn't understand our type narrowing + timeout=cast(Timeout, timeout), + max_retries=max_retries, + custom_query=custom_query, + custom_headers=custom_headers, + _strict_response_validation=_strict_response_validation, + ) + self._client = http_client or AsyncHttpxClientWrapper( + base_url=base_url, + # cast to a valid type because mypy doesn't understand our type narrowing + timeout=cast(Timeout, timeout), + ) + + def is_closed(self) -> bool: + return self._client.is_closed + + async def close(self) -> None: + """Close the underlying HTTPX client. + + The client will *not* be usable after this. + """ + await self._client.aclose() + + async def __aenter__(self: _T) -> _T: + return self + + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + await self.close() + + async def _prepare_options( + self, + options: FinalRequestOptions, # noqa: ARG002 + ) -> FinalRequestOptions: + """Hook for mutating the given options""" + return options + + async def _prepare_request( + self, + request: httpx.Request, # noqa: ARG002 + ) -> None: + """This method is used as a callback for mutating the `Request` object + after it has been constructed. + This is useful for cases where you want to add certain headers based off of + the request properties, e.g. `url`, `method` etc. + """ + return None + + @overload + async def request( + self, + cast_to: Type[ResponseT], + options: FinalRequestOptions, + *, + stream: Literal[False] = False, + ) -> ResponseT: ... 
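+    # The async `request` implementation mirrors the synchronous client's retry loop;
+    # the notable differences are that the option/request hooks are awaited, retry
+    # backoff uses `anyio.sleep`, and platform detection is offloaded via
+    # `asyncify(get_platform)` because it can make blocking IO calls.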
+ + @overload + async def request( + self, + cast_to: Type[ResponseT], + options: FinalRequestOptions, + *, + stream: Literal[True], + stream_cls: type[_AsyncStreamT], + ) -> _AsyncStreamT: ... + + @overload + async def request( + self, + cast_to: Type[ResponseT], + options: FinalRequestOptions, + *, + stream: bool, + stream_cls: type[_AsyncStreamT] | None = None, + ) -> ResponseT | _AsyncStreamT: ... + + async def request( + self, + cast_to: Type[ResponseT], + options: FinalRequestOptions, + *, + stream: bool = False, + stream_cls: type[_AsyncStreamT] | None = None, + ) -> ResponseT | _AsyncStreamT: + if self._platform is None: + # `get_platform` can make blocking IO calls so we + # execute it earlier while we are in an async context + self._platform = await asyncify(get_platform)() + + cast_to = self._maybe_override_cast_to(cast_to, options) + + # create a copy of the options we were given so that if the + # options are mutated later & we then retry, the retries are + # given the original options + input_options = model_copy(options) + if input_options.idempotency_key is None and input_options.method.lower() != "get": + # ensure the idempotency key is reused between requests + input_options.idempotency_key = self._idempotency_key() + + response: httpx.Response | None = None + max_retries = input_options.get_max_retries(self.max_retries) + + retries_taken = 0 + for retries_taken in range(max_retries + 1): + options = model_copy(input_options) + options = await self._prepare_options(options) + + remaining_retries = max_retries - retries_taken + request = self._build_request(options, retries_taken=retries_taken) + await self._prepare_request(request) + + kwargs: HttpxSendArgs = {} + if self.custom_auth is not None: + kwargs["auth"] = self.custom_auth + + if options.follow_redirects is not None: + kwargs["follow_redirects"] = options.follow_redirects + + log.debug("Sending HTTP Request: %s %s", request.method, request.url) + + response = None + try: + response = await self._client.send( + request, + stream=stream or self._should_stream_response_body(request=request), + **kwargs, + ) + except httpx.TimeoutException as err: + log.debug("Encountered httpx.TimeoutException", exc_info=True) + + if remaining_retries > 0: + await self._sleep_for_retry( + retries_taken=retries_taken, + max_retries=max_retries, + options=input_options, + response=None, + ) + continue + + log.debug("Raising timeout error") + raise APITimeoutError(request=request) from err + except Exception as err: + log.debug("Encountered Exception", exc_info=True) + + if remaining_retries > 0: + await self._sleep_for_retry( + retries_taken=retries_taken, + max_retries=max_retries, + options=input_options, + response=None, + ) + continue + + log.debug("Raising connection error") + raise APIConnectionError(request=request) from err + + log.debug( + 'HTTP Response: %s %s "%i %s" %s', + request.method, + request.url, + response.status_code, + response.reason_phrase, + response.headers, + ) + + try: + response.raise_for_status() + except httpx.HTTPStatusError as err: # thrown on 4xx and 5xx status code + log.debug("Encountered httpx.HTTPStatusError", exc_info=True) + + if remaining_retries > 0 and self._should_retry(err.response): + await err.response.aclose() + await self._sleep_for_retry( + retries_taken=retries_taken, + max_retries=max_retries, + options=input_options, + response=response, + ) + continue + + # If the response is streamed then we need to explicitly read the response + # to completion before attempting to access the 
response text. + if not err.response.is_closed: + await err.response.aread() + + log.debug("Re-raising status error") + raise self._make_status_error_from_response(err.response) from None + + break + + assert response is not None, "could not resolve response (should never happen)" + return await self._process_response( + cast_to=cast_to, + options=options, + response=response, + stream=stream, + stream_cls=stream_cls, + retries_taken=retries_taken, + ) + + async def _sleep_for_retry( + self, *, retries_taken: int, max_retries: int, options: FinalRequestOptions, response: httpx.Response | None + ) -> None: + remaining_retries = max_retries - retries_taken + if remaining_retries == 1: + log.debug("1 retry left") + else: + log.debug("%i retries left", remaining_retries) + + timeout = self._calculate_retry_timeout(remaining_retries, options, response.headers if response else None) + log.info("Retrying request to %s in %f seconds", options.url, timeout) + + await anyio.sleep(timeout) + + async def _process_response( + self, + *, + cast_to: Type[ResponseT], + options: FinalRequestOptions, + response: httpx.Response, + stream: bool, + stream_cls: type[Stream[Any]] | type[AsyncStream[Any]] | None, + retries_taken: int = 0, + ) -> ResponseT: + origin = get_origin(cast_to) or cast_to + + if ( + inspect.isclass(origin) + and issubclass(origin, BaseAPIResponse) + # we only want to actually return the custom BaseAPIResponse class if we're + # returning the raw response, or if we're not streaming SSE, as if we're streaming + # SSE then `cast_to` doesn't actively reflect the type we need to parse into + and (not stream or bool(response.request.headers.get(RAW_RESPONSE_HEADER))) + ): + if not issubclass(origin, AsyncAPIResponse): + raise TypeError(f"API Response types must subclass {AsyncAPIResponse}; Received {origin}") + + response_cls = cast("type[BaseAPIResponse[Any]]", cast_to) + return cast( + "ResponseT", + response_cls( + raw=response, + client=self, + cast_to=extract_response_type(response_cls), + stream=stream, + stream_cls=stream_cls, + options=options, + retries_taken=retries_taken, + ), + ) + + if cast_to == httpx.Response: + return cast(ResponseT, response) + + api_response = AsyncAPIResponse( + raw=response, + client=self, + cast_to=cast("type[ResponseT]", cast_to), # pyright: ignore[reportUnnecessaryCast] + stream=stream, + stream_cls=stream_cls, + options=options, + retries_taken=retries_taken, + ) + if bool(response.request.headers.get(RAW_RESPONSE_HEADER)): + return cast(ResponseT, api_response) + + return await api_response.parse() + + def _request_api_list( + self, + model: Type[_T], + page: Type[AsyncPageT], + options: FinalRequestOptions, + ) -> AsyncPaginator[_T, AsyncPageT]: + return AsyncPaginator(client=self, options=options, page_cls=page, model=model) + + @overload + async def get( + self, + path: str, + *, + cast_to: Type[ResponseT], + options: RequestOptions = {}, + stream: Literal[False] = False, + ) -> ResponseT: ... + + @overload + async def get( + self, + path: str, + *, + cast_to: Type[ResponseT], + options: RequestOptions = {}, + stream: Literal[True], + stream_cls: type[_AsyncStreamT], + ) -> _AsyncStreamT: ... + + @overload + async def get( + self, + path: str, + *, + cast_to: Type[ResponseT], + options: RequestOptions = {}, + stream: bool, + stream_cls: type[_AsyncStreamT] | None = None, + ) -> ResponseT | _AsyncStreamT: ... 
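+    # As in the sync client, the HTTP verb helpers (`get`, `post`, `patch`, `put`,
+    # `delete`) are thin wrappers: each builds `FinalRequestOptions` for its method
+    # and delegates to `request()`, converting any file payloads with
+    # `async_to_httpx_files` before sending.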
+ + async def get( + self, + path: str, + *, + cast_to: Type[ResponseT], + options: RequestOptions = {}, + stream: bool = False, + stream_cls: type[_AsyncStreamT] | None = None, + ) -> ResponseT | _AsyncStreamT: + opts = FinalRequestOptions.construct(method="get", url=path, **options) + return await self.request(cast_to, opts, stream=stream, stream_cls=stream_cls) + + @overload + async def post( + self, + path: str, + *, + cast_to: Type[ResponseT], + body: Body | None = None, + files: RequestFiles | None = None, + options: RequestOptions = {}, + stream: Literal[False] = False, + ) -> ResponseT: ... + + @overload + async def post( + self, + path: str, + *, + cast_to: Type[ResponseT], + body: Body | None = None, + files: RequestFiles | None = None, + options: RequestOptions = {}, + stream: Literal[True], + stream_cls: type[_AsyncStreamT], + ) -> _AsyncStreamT: ... + + @overload + async def post( + self, + path: str, + *, + cast_to: Type[ResponseT], + body: Body | None = None, + files: RequestFiles | None = None, + options: RequestOptions = {}, + stream: bool, + stream_cls: type[_AsyncStreamT] | None = None, + ) -> ResponseT | _AsyncStreamT: ... + + async def post( + self, + path: str, + *, + cast_to: Type[ResponseT], + body: Body | None = None, + files: RequestFiles | None = None, + options: RequestOptions = {}, + stream: bool = False, + stream_cls: type[_AsyncStreamT] | None = None, + ) -> ResponseT | _AsyncStreamT: + opts = FinalRequestOptions.construct( + method="post", url=path, json_data=body, files=await async_to_httpx_files(files), **options + ) + return await self.request(cast_to, opts, stream=stream, stream_cls=stream_cls) + + async def patch( + self, + path: str, + *, + cast_to: Type[ResponseT], + body: Body | None = None, + options: RequestOptions = {}, + ) -> ResponseT: + opts = FinalRequestOptions.construct(method="patch", url=path, json_data=body, **options) + return await self.request(cast_to, opts) + + async def put( + self, + path: str, + *, + cast_to: Type[ResponseT], + body: Body | None = None, + files: RequestFiles | None = None, + options: RequestOptions = {}, + ) -> ResponseT: + opts = FinalRequestOptions.construct( + method="put", url=path, json_data=body, files=await async_to_httpx_files(files), **options + ) + return await self.request(cast_to, opts) + + async def delete( + self, + path: str, + *, + cast_to: Type[ResponseT], + body: Body | None = None, + options: RequestOptions = {}, + ) -> ResponseT: + opts = FinalRequestOptions.construct(method="delete", url=path, json_data=body, **options) + return await self.request(cast_to, opts) + + def get_api_list( + self, + path: str, + *, + model: Type[_T], + page: Type[AsyncPageT], + body: Body | None = None, + options: RequestOptions = {}, + method: str = "get", + ) -> AsyncPaginator[_T, AsyncPageT]: + opts = FinalRequestOptions.construct(method=method, url=path, json_data=body, **options) + return self._request_api_list(model, page, opts) + + +def make_request_options( + *, + query: Query | None = None, + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + idempotency_key: str | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + post_parser: PostParser | NotGiven = not_given, +) -> RequestOptions: + """Create a dict of type RequestOptions without keys of NotGiven values.""" + options: RequestOptions = {} + if extra_headers is not None: + options["headers"] = extra_headers + + if extra_body is not None: + options["extra_json"] = 
cast(AnyMapping, extra_body) + + if query is not None: + options["params"] = query + + if extra_query is not None: + options["params"] = {**options.get("params", {}), **extra_query} + + if not isinstance(timeout, NotGiven): + options["timeout"] = timeout + + if idempotency_key is not None: + options["idempotency_key"] = idempotency_key + + if is_given(post_parser): + # internal + options["post_parser"] = post_parser # type: ignore + + return options + + +class ForceMultipartDict(Dict[str, None]): + def __bool__(self) -> bool: + return True + + +class OtherPlatform: + def __init__(self, name: str) -> None: + self.name = name + + @override + def __str__(self) -> str: + return f"Other:{self.name}" + + +Platform = Union[ + OtherPlatform, + Literal[ + "MacOS", + "Linux", + "Windows", + "FreeBSD", + "OpenBSD", + "iOS", + "Android", + "Unknown", + ], +] + + +def get_platform() -> Platform: + try: + system = platform.system().lower() + platform_name = platform.platform().lower() + except Exception: + return "Unknown" + + if "iphone" in platform_name or "ipad" in platform_name: + # Tested using Python3IDE on an iPhone 11 and Pythonista on an iPad 7 + # system is Darwin and platform_name is a string like: + # - Darwin-21.6.0-iPhone12,1-64bit + # - Darwin-21.6.0-iPad7,11-64bit + return "iOS" + + if system == "darwin": + return "MacOS" + + if system == "windows": + return "Windows" + + if "android" in platform_name: + # Tested using Pydroid 3 + # system is Linux and platform_name is a string like 'Linux-5.10.81-android12-9-00001-geba40aecb3b7-ab8534902-aarch64-with-libc' + return "Android" + + if system == "linux": + # https://distro.readthedocs.io/en/latest/#distro.id + distro_id = distro.id() + if distro_id == "freebsd": + return "FreeBSD" + + if distro_id == "openbsd": + return "OpenBSD" + + return "Linux" + + if platform_name: + return OtherPlatform(platform_name) + + return "Unknown" + + +@lru_cache(maxsize=None) +def platform_headers(version: str, *, platform: Platform | None) -> Dict[str, str]: + return { + "X-Stainless-Lang": "python", + "X-Stainless-Package-Version": version, + "X-Stainless-OS": str(platform or get_platform()), + "X-Stainless-Arch": str(get_architecture()), + "X-Stainless-Runtime": get_python_runtime(), + "X-Stainless-Runtime-Version": get_python_version(), + } + + +class OtherArch: + def __init__(self, name: str) -> None: + self.name = name + + @override + def __str__(self) -> str: + return f"other:{self.name}" + + +Arch = Union[OtherArch, Literal["x32", "x64", "arm", "arm64", "unknown"]] + + +def get_python_runtime() -> str: + try: + return platform.python_implementation() + except Exception: + return "unknown" + + +def get_python_version() -> str: + try: + return platform.python_version() + except Exception: + return "unknown" + + +def get_architecture() -> Arch: + try: + machine = platform.machine().lower() + except Exception: + return "unknown" + + if machine in ("arm64", "aarch64"): + return "arm64" + + # TODO: untested + if machine == "arm": + return "arm" + + if machine == "x86_64": + return "x64" + + # TODO: untested + if sys.maxsize <= 2**32: + return "x32" + + if machine: + return OtherArch(machine) + + return "unknown" + + +def _merge_mappings( + obj1: Mapping[_T_co, Union[_T, Omit]], + obj2: Mapping[_T_co, Union[_T, Omit]], +) -> Dict[_T_co, _T]: + """Merge two mappings of the same type, removing any values that are instances of `Omit`. + + In cases with duplicate keys the second mapping takes precedence. 
+ """ + merged = {**obj1, **obj2} + return {key: value for key, value in merged.items() if not isinstance(value, Omit)} diff --git a/src/brapi/_client.py b/src/brapi/_client.py new file mode 100644 index 0000000..992de0c --- /dev/null +++ b/src/brapi/_client.py @@ -0,0 +1,478 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +import os +from typing import Any, Dict, Mapping, cast +from typing_extensions import Self, Literal, override + +import httpx + +from . import _exceptions +from ._qs import Querystring +from ._types import ( + Omit, + Timeout, + NotGiven, + Transport, + ProxiesTypes, + RequestOptions, + not_given, +) +from ._utils import is_given, get_async_library +from ._version import __version__ +from .resources import quote, available +from ._streaming import Stream as Stream, AsyncStream as AsyncStream +from ._exceptions import BrapiError, APIStatusError +from ._base_client import ( + DEFAULT_MAX_RETRIES, + SyncAPIClient, + AsyncAPIClient, +) +from .resources.v2 import v2 + +__all__ = [ + "ENVIRONMENTS", + "Timeout", + "Transport", + "ProxiesTypes", + "RequestOptions", + "Brapi", + "AsyncBrapi", + "Client", + "AsyncClient", +] + +ENVIRONMENTS: Dict[str, str] = { + "production": "https://brapi.dev", + "environment_1": "http://localhost:3000", +} + + +class Brapi(SyncAPIClient): + quote: quote.QuoteResource + available: available.AvailableResource + v2: v2.V2Resource + with_raw_response: BrapiWithRawResponse + with_streaming_response: BrapiWithStreamedResponse + + # client options + api_key: str + + _environment: Literal["production", "environment_1"] | NotGiven + + def __init__( + self, + *, + api_key: str | None = None, + environment: Literal["production", "environment_1"] | NotGiven = not_given, + base_url: str | httpx.URL | None | NotGiven = not_given, + timeout: float | Timeout | None | NotGiven = not_given, + max_retries: int = DEFAULT_MAX_RETRIES, + default_headers: Mapping[str, str] | None = None, + default_query: Mapping[str, object] | None = None, + # Configure a custom httpx client. + # We provide a `DefaultHttpxClient` class that you can pass to retain the default values we use for `limits`, `timeout` & `follow_redirects`. + # See the [httpx documentation](https://www.python-httpx.org/api/#client) for more details. + http_client: httpx.Client | None = None, + # Enable or disable schema validation for data returned by the API. + # When enabled an error APIResponseValidationError is raised + # if the API responds with invalid data for the expected schema. + # + # This parameter may be removed or changed in the future. + # If you rely on this feature, please open a GitHub issue + # outlining your use-case to help us decide if it should be + # part of our public interface in the future. + _strict_response_validation: bool = False, + ) -> None: + """Construct a new synchronous Brapi client instance. + + This automatically infers the `api_key` argument from the `BRAPI_API_KEY` environment variable if it is not provided. 
+ """ + if api_key is None: + api_key = os.environ.get("BRAPI_API_KEY") + if api_key is None: + raise BrapiError( + "The api_key client option must be set either by passing api_key to the client or by setting the BRAPI_API_KEY environment variable" + ) + self.api_key = api_key + + self._environment = environment + + base_url_env = os.environ.get("BRAPI_BASE_URL") + if is_given(base_url) and base_url is not None: + # cast required because mypy doesn't understand the type narrowing + base_url = cast("str | httpx.URL", base_url) # pyright: ignore[reportUnnecessaryCast] + elif is_given(environment): + if base_url_env and base_url is not None: + raise ValueError( + "Ambiguous URL; The `BRAPI_BASE_URL` env var and the `environment` argument are given. If you want to use the environment, you must pass base_url=None", + ) + + try: + base_url = ENVIRONMENTS[environment] + except KeyError as exc: + raise ValueError(f"Unknown environment: {environment}") from exc + elif base_url_env is not None: + base_url = base_url_env + else: + self._environment = environment = "production" + + try: + base_url = ENVIRONMENTS[environment] + except KeyError as exc: + raise ValueError(f"Unknown environment: {environment}") from exc + + super().__init__( + version=__version__, + base_url=base_url, + max_retries=max_retries, + timeout=timeout, + http_client=http_client, + custom_headers=default_headers, + custom_query=default_query, + _strict_response_validation=_strict_response_validation, + ) + + self.quote = quote.QuoteResource(self) + self.available = available.AvailableResource(self) + self.v2 = v2.V2Resource(self) + self.with_raw_response = BrapiWithRawResponse(self) + self.with_streaming_response = BrapiWithStreamedResponse(self) + + @property + @override + def qs(self) -> Querystring: + return Querystring(array_format="comma") + + @property + @override + def auth_headers(self) -> dict[str, str]: + api_key = self.api_key + return {"Authorization": f"Bearer {api_key}"} + + @property + @override + def default_headers(self) -> dict[str, str | Omit]: + return { + **super().default_headers, + "X-Stainless-Async": "false", + **self._custom_headers, + } + + def copy( + self, + *, + api_key: str | None = None, + environment: Literal["production", "environment_1"] | None = None, + base_url: str | httpx.URL | None = None, + timeout: float | Timeout | None | NotGiven = not_given, + http_client: httpx.Client | None = None, + max_retries: int | NotGiven = not_given, + default_headers: Mapping[str, str] | None = None, + set_default_headers: Mapping[str, str] | None = None, + default_query: Mapping[str, object] | None = None, + set_default_query: Mapping[str, object] | None = None, + _extra_kwargs: Mapping[str, Any] = {}, + ) -> Self: + """ + Create a new client instance re-using the same options given to the current client with optional overriding. 
+ """ + if default_headers is not None and set_default_headers is not None: + raise ValueError("The `default_headers` and `set_default_headers` arguments are mutually exclusive") + + if default_query is not None and set_default_query is not None: + raise ValueError("The `default_query` and `set_default_query` arguments are mutually exclusive") + + headers = self._custom_headers + if default_headers is not None: + headers = {**headers, **default_headers} + elif set_default_headers is not None: + headers = set_default_headers + + params = self._custom_query + if default_query is not None: + params = {**params, **default_query} + elif set_default_query is not None: + params = set_default_query + + http_client = http_client or self._client + return self.__class__( + api_key=api_key or self.api_key, + base_url=base_url or self.base_url, + environment=environment or self._environment, + timeout=self.timeout if isinstance(timeout, NotGiven) else timeout, + http_client=http_client, + max_retries=max_retries if is_given(max_retries) else self.max_retries, + default_headers=headers, + default_query=params, + **_extra_kwargs, + ) + + # Alias for `copy` for nicer inline usage, e.g. + # client.with_options(timeout=10).foo.create(...) + with_options = copy + + @override + def _make_status_error( + self, + err_msg: str, + *, + body: object, + response: httpx.Response, + ) -> APIStatusError: + if response.status_code == 400: + return _exceptions.BadRequestError(err_msg, response=response, body=body) + + if response.status_code == 401: + return _exceptions.AuthenticationError(err_msg, response=response, body=body) + + if response.status_code == 403: + return _exceptions.PermissionDeniedError(err_msg, response=response, body=body) + + if response.status_code == 404: + return _exceptions.NotFoundError(err_msg, response=response, body=body) + + if response.status_code == 409: + return _exceptions.ConflictError(err_msg, response=response, body=body) + + if response.status_code == 422: + return _exceptions.UnprocessableEntityError(err_msg, response=response, body=body) + + if response.status_code == 429: + return _exceptions.RateLimitError(err_msg, response=response, body=body) + + if response.status_code >= 500: + return _exceptions.InternalServerError(err_msg, response=response, body=body) + return APIStatusError(err_msg, response=response, body=body) + + +class AsyncBrapi(AsyncAPIClient): + quote: quote.AsyncQuoteResource + available: available.AsyncAvailableResource + v2: v2.AsyncV2Resource + with_raw_response: AsyncBrapiWithRawResponse + with_streaming_response: AsyncBrapiWithStreamedResponse + + # client options + api_key: str + + _environment: Literal["production", "environment_1"] | NotGiven + + def __init__( + self, + *, + api_key: str | None = None, + environment: Literal["production", "environment_1"] | NotGiven = not_given, + base_url: str | httpx.URL | None | NotGiven = not_given, + timeout: float | Timeout | None | NotGiven = not_given, + max_retries: int = DEFAULT_MAX_RETRIES, + default_headers: Mapping[str, str] | None = None, + default_query: Mapping[str, object] | None = None, + # Configure a custom httpx client. + # We provide a `DefaultAsyncHttpxClient` class that you can pass to retain the default values we use for `limits`, `timeout` & `follow_redirects`. + # See the [httpx documentation](https://www.python-httpx.org/api/#asyncclient) for more details. + http_client: httpx.AsyncClient | None = None, + # Enable or disable schema validation for data returned by the API. 
+ # When enabled an error APIResponseValidationError is raised + # if the API responds with invalid data for the expected schema. + # + # This parameter may be removed or changed in the future. + # If you rely on this feature, please open a GitHub issue + # outlining your use-case to help us decide if it should be + # part of our public interface in the future. + _strict_response_validation: bool = False, + ) -> None: + """Construct a new async AsyncBrapi client instance. + + This automatically infers the `api_key` argument from the `BRAPI_API_KEY` environment variable if it is not provided. + """ + if api_key is None: + api_key = os.environ.get("BRAPI_API_KEY") + if api_key is None: + raise BrapiError( + "The api_key client option must be set either by passing api_key to the client or by setting the BRAPI_API_KEY environment variable" + ) + self.api_key = api_key + + self._environment = environment + + base_url_env = os.environ.get("BRAPI_BASE_URL") + if is_given(base_url) and base_url is not None: + # cast required because mypy doesn't understand the type narrowing + base_url = cast("str | httpx.URL", base_url) # pyright: ignore[reportUnnecessaryCast] + elif is_given(environment): + if base_url_env and base_url is not None: + raise ValueError( + "Ambiguous URL; The `BRAPI_BASE_URL` env var and the `environment` argument are given. If you want to use the environment, you must pass base_url=None", + ) + + try: + base_url = ENVIRONMENTS[environment] + except KeyError as exc: + raise ValueError(f"Unknown environment: {environment}") from exc + elif base_url_env is not None: + base_url = base_url_env + else: + self._environment = environment = "production" + + try: + base_url = ENVIRONMENTS[environment] + except KeyError as exc: + raise ValueError(f"Unknown environment: {environment}") from exc + + super().__init__( + version=__version__, + base_url=base_url, + max_retries=max_retries, + timeout=timeout, + http_client=http_client, + custom_headers=default_headers, + custom_query=default_query, + _strict_response_validation=_strict_response_validation, + ) + + self.quote = quote.AsyncQuoteResource(self) + self.available = available.AsyncAvailableResource(self) + self.v2 = v2.AsyncV2Resource(self) + self.with_raw_response = AsyncBrapiWithRawResponse(self) + self.with_streaming_response = AsyncBrapiWithStreamedResponse(self) + + @property + @override + def qs(self) -> Querystring: + return Querystring(array_format="comma") + + @property + @override + def auth_headers(self) -> dict[str, str]: + api_key = self.api_key + return {"Authorization": f"Bearer {api_key}"} + + @property + @override + def default_headers(self) -> dict[str, str | Omit]: + return { + **super().default_headers, + "X-Stainless-Async": f"async:{get_async_library()}", + **self._custom_headers, + } + + def copy( + self, + *, + api_key: str | None = None, + environment: Literal["production", "environment_1"] | None = None, + base_url: str | httpx.URL | None = None, + timeout: float | Timeout | None | NotGiven = not_given, + http_client: httpx.AsyncClient | None = None, + max_retries: int | NotGiven = not_given, + default_headers: Mapping[str, str] | None = None, + set_default_headers: Mapping[str, str] | None = None, + default_query: Mapping[str, object] | None = None, + set_default_query: Mapping[str, object] | None = None, + _extra_kwargs: Mapping[str, Any] = {}, + ) -> Self: + """ + Create a new client instance re-using the same options given to the current client with optional overriding. 
+ """ + if default_headers is not None and set_default_headers is not None: + raise ValueError("The `default_headers` and `set_default_headers` arguments are mutually exclusive") + + if default_query is not None and set_default_query is not None: + raise ValueError("The `default_query` and `set_default_query` arguments are mutually exclusive") + + headers = self._custom_headers + if default_headers is not None: + headers = {**headers, **default_headers} + elif set_default_headers is not None: + headers = set_default_headers + + params = self._custom_query + if default_query is not None: + params = {**params, **default_query} + elif set_default_query is not None: + params = set_default_query + + http_client = http_client or self._client + return self.__class__( + api_key=api_key or self.api_key, + base_url=base_url or self.base_url, + environment=environment or self._environment, + timeout=self.timeout if isinstance(timeout, NotGiven) else timeout, + http_client=http_client, + max_retries=max_retries if is_given(max_retries) else self.max_retries, + default_headers=headers, + default_query=params, + **_extra_kwargs, + ) + + # Alias for `copy` for nicer inline usage, e.g. + # client.with_options(timeout=10).foo.create(...) + with_options = copy + + @override + def _make_status_error( + self, + err_msg: str, + *, + body: object, + response: httpx.Response, + ) -> APIStatusError: + if response.status_code == 400: + return _exceptions.BadRequestError(err_msg, response=response, body=body) + + if response.status_code == 401: + return _exceptions.AuthenticationError(err_msg, response=response, body=body) + + if response.status_code == 403: + return _exceptions.PermissionDeniedError(err_msg, response=response, body=body) + + if response.status_code == 404: + return _exceptions.NotFoundError(err_msg, response=response, body=body) + + if response.status_code == 409: + return _exceptions.ConflictError(err_msg, response=response, body=body) + + if response.status_code == 422: + return _exceptions.UnprocessableEntityError(err_msg, response=response, body=body) + + if response.status_code == 429: + return _exceptions.RateLimitError(err_msg, response=response, body=body) + + if response.status_code >= 500: + return _exceptions.InternalServerError(err_msg, response=response, body=body) + return APIStatusError(err_msg, response=response, body=body) + + +class BrapiWithRawResponse: + def __init__(self, client: Brapi) -> None: + self.quote = quote.QuoteResourceWithRawResponse(client.quote) + self.available = available.AvailableResourceWithRawResponse(client.available) + self.v2 = v2.V2ResourceWithRawResponse(client.v2) + + +class AsyncBrapiWithRawResponse: + def __init__(self, client: AsyncBrapi) -> None: + self.quote = quote.AsyncQuoteResourceWithRawResponse(client.quote) + self.available = available.AsyncAvailableResourceWithRawResponse(client.available) + self.v2 = v2.AsyncV2ResourceWithRawResponse(client.v2) + + +class BrapiWithStreamedResponse: + def __init__(self, client: Brapi) -> None: + self.quote = quote.QuoteResourceWithStreamingResponse(client.quote) + self.available = available.AvailableResourceWithStreamingResponse(client.available) + self.v2 = v2.V2ResourceWithStreamingResponse(client.v2) + + +class AsyncBrapiWithStreamedResponse: + def __init__(self, client: AsyncBrapi) -> None: + self.quote = quote.AsyncQuoteResourceWithStreamingResponse(client.quote) + self.available = available.AsyncAvailableResourceWithStreamingResponse(client.available) + self.v2 = 
v2.AsyncV2ResourceWithStreamingResponse(client.v2) + + +Client = Brapi + +AsyncClient = AsyncBrapi diff --git a/src/brapi/_compat.py b/src/brapi/_compat.py new file mode 100644 index 0000000..bdef67f --- /dev/null +++ b/src/brapi/_compat.py @@ -0,0 +1,219 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING, Any, Union, Generic, TypeVar, Callable, cast, overload +from datetime import date, datetime +from typing_extensions import Self, Literal + +import pydantic +from pydantic.fields import FieldInfo + +from ._types import IncEx, StrBytesIntFloat + +_T = TypeVar("_T") +_ModelT = TypeVar("_ModelT", bound=pydantic.BaseModel) + +# --------------- Pydantic v2, v3 compatibility --------------- + +# Pyright incorrectly reports some of our functions as overriding a method when they don't +# pyright: reportIncompatibleMethodOverride=false + +PYDANTIC_V1 = pydantic.VERSION.startswith("1.") + +if TYPE_CHECKING: + + def parse_date(value: date | StrBytesIntFloat) -> date: # noqa: ARG001 + ... + + def parse_datetime(value: Union[datetime, StrBytesIntFloat]) -> datetime: # noqa: ARG001 + ... + + def get_args(t: type[Any]) -> tuple[Any, ...]: # noqa: ARG001 + ... + + def is_union(tp: type[Any] | None) -> bool: # noqa: ARG001 + ... + + def get_origin(t: type[Any]) -> type[Any] | None: # noqa: ARG001 + ... + + def is_literal_type(type_: type[Any]) -> bool: # noqa: ARG001 + ... + + def is_typeddict(type_: type[Any]) -> bool: # noqa: ARG001 + ... + +else: + # v1 re-exports + if PYDANTIC_V1: + from pydantic.typing import ( + get_args as get_args, + is_union as is_union, + get_origin as get_origin, + is_typeddict as is_typeddict, + is_literal_type as is_literal_type, + ) + from pydantic.datetime_parse import parse_date as parse_date, parse_datetime as parse_datetime + else: + from ._utils import ( + get_args as get_args, + is_union as is_union, + get_origin as get_origin, + parse_date as parse_date, + is_typeddict as is_typeddict, + parse_datetime as parse_datetime, + is_literal_type as is_literal_type, + ) + + +# refactored config +if TYPE_CHECKING: + from pydantic import ConfigDict as ConfigDict +else: + if PYDANTIC_V1: + # TODO: provide an error message here? 
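+        # `ConfigDict` only exists in Pydantic v2, so under v1 it is exported as
+        # `None` at runtime; type checkers always resolve the v2 import above.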
+ ConfigDict = None + else: + from pydantic import ConfigDict as ConfigDict + + +# renamed methods / properties +def parse_obj(model: type[_ModelT], value: object) -> _ModelT: + if PYDANTIC_V1: + return cast(_ModelT, model.parse_obj(value)) # pyright: ignore[reportDeprecated, reportUnnecessaryCast] + else: + return model.model_validate(value) + + +def field_is_required(field: FieldInfo) -> bool: + if PYDANTIC_V1: + return field.required # type: ignore + return field.is_required() + + +def field_get_default(field: FieldInfo) -> Any: + value = field.get_default() + if PYDANTIC_V1: + return value + from pydantic_core import PydanticUndefined + + if value == PydanticUndefined: + return None + return value + + +def field_outer_type(field: FieldInfo) -> Any: + if PYDANTIC_V1: + return field.outer_type_ # type: ignore + return field.annotation + + +def get_model_config(model: type[pydantic.BaseModel]) -> Any: + if PYDANTIC_V1: + return model.__config__ # type: ignore + return model.model_config + + +def get_model_fields(model: type[pydantic.BaseModel]) -> dict[str, FieldInfo]: + if PYDANTIC_V1: + return model.__fields__ # type: ignore + return model.model_fields + + +def model_copy(model: _ModelT, *, deep: bool = False) -> _ModelT: + if PYDANTIC_V1: + return model.copy(deep=deep) # type: ignore + return model.model_copy(deep=deep) + + +def model_json(model: pydantic.BaseModel, *, indent: int | None = None) -> str: + if PYDANTIC_V1: + return model.json(indent=indent) # type: ignore + return model.model_dump_json(indent=indent) + + +def model_dump( + model: pydantic.BaseModel, + *, + exclude: IncEx | None = None, + exclude_unset: bool = False, + exclude_defaults: bool = False, + warnings: bool = True, + mode: Literal["json", "python"] = "python", +) -> dict[str, Any]: + if (not PYDANTIC_V1) or hasattr(model, "model_dump"): + return model.model_dump( + mode=mode, + exclude=exclude, + exclude_unset=exclude_unset, + exclude_defaults=exclude_defaults, + # warnings are not supported in Pydantic v1 + warnings=True if PYDANTIC_V1 else warnings, + ) + return cast( + "dict[str, Any]", + model.dict( # pyright: ignore[reportDeprecated, reportUnnecessaryCast] + exclude=exclude, + exclude_unset=exclude_unset, + exclude_defaults=exclude_defaults, + ), + ) + + +def model_parse(model: type[_ModelT], data: Any) -> _ModelT: + if PYDANTIC_V1: + return model.parse_obj(data) # pyright: ignore[reportDeprecated] + return model.model_validate(data) + + +# generic models +if TYPE_CHECKING: + + class GenericModel(pydantic.BaseModel): ... + +else: + if PYDANTIC_V1: + import pydantic.generics + + class GenericModel(pydantic.generics.GenericModel, pydantic.BaseModel): ... + else: + # there no longer needs to be a distinction in v2 but + # we still have to create our own subclass to avoid + # inconsistent MRO ordering errors + class GenericModel(pydantic.BaseModel): ... + + +# cached properties +if TYPE_CHECKING: + cached_property = property + + # we define a separate type (copied from typeshed) + # that represents that `cached_property` is `set`able + # at runtime, which differs from `@property`. + # + # this is a separate type as editors likely special case + # `@property` and we don't want to cause issues just to have + # more helpful internal types. + + class typed_cached_property(Generic[_T]): + func: Callable[[Any], _T] + attrname: str | None + + def __init__(self, func: Callable[[Any], _T]) -> None: ... + + @overload + def __get__(self, instance: None, owner: type[Any] | None = None) -> Self: ... 
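+        # Accessing the descriptor on the class (instance is None) returns the
+        # descriptor itself, while instance access returns the cached value. At
+        # runtime the plain `functools.cached_property` is used instead; this stub
+        # only exists so type checkers treat the attribute as settable.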
+ + @overload + def __get__(self, instance: object, owner: type[Any] | None = None) -> _T: ... + + def __get__(self, instance: object, owner: type[Any] | None = None) -> _T | Self: + raise NotImplementedError() + + def __set_name__(self, owner: type[Any], name: str) -> None: ... + + # __set__ is not defined at runtime, but @cached_property is designed to be settable + def __set__(self, instance: object, value: _T) -> None: ... +else: + from functools import cached_property as cached_property + + typed_cached_property = cached_property diff --git a/src/brapi/_constants.py b/src/brapi/_constants.py new file mode 100644 index 0000000..6ddf2c7 --- /dev/null +++ b/src/brapi/_constants.py @@ -0,0 +1,14 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +import httpx + +RAW_RESPONSE_HEADER = "X-Stainless-Raw-Response" +OVERRIDE_CAST_TO_HEADER = "____stainless_override_cast_to" + +# default timeout is 1 minute +DEFAULT_TIMEOUT = httpx.Timeout(timeout=60, connect=5.0) +DEFAULT_MAX_RETRIES = 2 +DEFAULT_CONNECTION_LIMITS = httpx.Limits(max_connections=100, max_keepalive_connections=20) + +INITIAL_RETRY_DELAY = 0.5 +MAX_RETRY_DELAY = 8.0 diff --git a/src/brapi/_exceptions.py b/src/brapi/_exceptions.py new file mode 100644 index 0000000..3aa815f --- /dev/null +++ b/src/brapi/_exceptions.py @@ -0,0 +1,108 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing_extensions import Literal + +import httpx + +__all__ = [ + "BadRequestError", + "AuthenticationError", + "PermissionDeniedError", + "NotFoundError", + "ConflictError", + "UnprocessableEntityError", + "RateLimitError", + "InternalServerError", +] + + +class BrapiError(Exception): + pass + + +class APIError(BrapiError): + message: str + request: httpx.Request + + body: object | None + """The API response body. + + If the API responded with a valid JSON structure then this property will be the + decoded result. + + If it isn't a valid JSON structure then this will be the raw response. + + If there was no response associated with this error then it will be `None`. 
+ """ + + def __init__(self, message: str, request: httpx.Request, *, body: object | None) -> None: # noqa: ARG002 + super().__init__(message) + self.request = request + self.message = message + self.body = body + + +class APIResponseValidationError(APIError): + response: httpx.Response + status_code: int + + def __init__(self, response: httpx.Response, body: object | None, *, message: str | None = None) -> None: + super().__init__(message or "Data returned by API invalid for expected schema.", response.request, body=body) + self.response = response + self.status_code = response.status_code + + +class APIStatusError(APIError): + """Raised when an API response has a status code of 4xx or 5xx.""" + + response: httpx.Response + status_code: int + + def __init__(self, message: str, *, response: httpx.Response, body: object | None) -> None: + super().__init__(message, response.request, body=body) + self.response = response + self.status_code = response.status_code + + +class APIConnectionError(APIError): + def __init__(self, *, message: str = "Connection error.", request: httpx.Request) -> None: + super().__init__(message, request, body=None) + + +class APITimeoutError(APIConnectionError): + def __init__(self, request: httpx.Request) -> None: + super().__init__(message="Request timed out.", request=request) + + +class BadRequestError(APIStatusError): + status_code: Literal[400] = 400 # pyright: ignore[reportIncompatibleVariableOverride] + + +class AuthenticationError(APIStatusError): + status_code: Literal[401] = 401 # pyright: ignore[reportIncompatibleVariableOverride] + + +class PermissionDeniedError(APIStatusError): + status_code: Literal[403] = 403 # pyright: ignore[reportIncompatibleVariableOverride] + + +class NotFoundError(APIStatusError): + status_code: Literal[404] = 404 # pyright: ignore[reportIncompatibleVariableOverride] + + +class ConflictError(APIStatusError): + status_code: Literal[409] = 409 # pyright: ignore[reportIncompatibleVariableOverride] + + +class UnprocessableEntityError(APIStatusError): + status_code: Literal[422] = 422 # pyright: ignore[reportIncompatibleVariableOverride] + + +class RateLimitError(APIStatusError): + status_code: Literal[429] = 429 # pyright: ignore[reportIncompatibleVariableOverride] + + +class InternalServerError(APIStatusError): + pass diff --git a/src/brapi/_files.py b/src/brapi/_files.py new file mode 100644 index 0000000..cc14c14 --- /dev/null +++ b/src/brapi/_files.py @@ -0,0 +1,123 @@ +from __future__ import annotations + +import io +import os +import pathlib +from typing import overload +from typing_extensions import TypeGuard + +import anyio + +from ._types import ( + FileTypes, + FileContent, + RequestFiles, + HttpxFileTypes, + Base64FileInput, + HttpxFileContent, + HttpxRequestFiles, +) +from ._utils import is_tuple_t, is_mapping_t, is_sequence_t + + +def is_base64_file_input(obj: object) -> TypeGuard[Base64FileInput]: + return isinstance(obj, io.IOBase) or isinstance(obj, os.PathLike) + + +def is_file_content(obj: object) -> TypeGuard[FileContent]: + return ( + isinstance(obj, bytes) or isinstance(obj, tuple) or isinstance(obj, io.IOBase) or isinstance(obj, os.PathLike) + ) + + +def assert_is_file_content(obj: object, *, key: str | None = None) -> None: + if not is_file_content(obj): + prefix = f"Expected entry at `{key}`" if key is not None else f"Expected file input `{obj!r}`" + raise RuntimeError( + f"{prefix} to be bytes, an io.IOBase instance, PathLike or a tuple but received {type(obj)} instead." 
+ ) from None + + +@overload +def to_httpx_files(files: None) -> None: ... + + +@overload +def to_httpx_files(files: RequestFiles) -> HttpxRequestFiles: ... + + +def to_httpx_files(files: RequestFiles | None) -> HttpxRequestFiles | None: + if files is None: + return None + + if is_mapping_t(files): + files = {key: _transform_file(file) for key, file in files.items()} + elif is_sequence_t(files): + files = [(key, _transform_file(file)) for key, file in files] + else: + raise TypeError(f"Unexpected file type input {type(files)}, expected mapping or sequence") + + return files + + +def _transform_file(file: FileTypes) -> HttpxFileTypes: + if is_file_content(file): + if isinstance(file, os.PathLike): + path = pathlib.Path(file) + return (path.name, path.read_bytes()) + + return file + + if is_tuple_t(file): + return (file[0], read_file_content(file[1]), *file[2:]) + + raise TypeError(f"Expected file types input to be a FileContent type or to be a tuple") + + +def read_file_content(file: FileContent) -> HttpxFileContent: + if isinstance(file, os.PathLike): + return pathlib.Path(file).read_bytes() + return file + + +@overload +async def async_to_httpx_files(files: None) -> None: ... + + +@overload +async def async_to_httpx_files(files: RequestFiles) -> HttpxRequestFiles: ... + + +async def async_to_httpx_files(files: RequestFiles | None) -> HttpxRequestFiles | None: + if files is None: + return None + + if is_mapping_t(files): + files = {key: await _async_transform_file(file) for key, file in files.items()} + elif is_sequence_t(files): + files = [(key, await _async_transform_file(file)) for key, file in files] + else: + raise TypeError("Unexpected file type input {type(files)}, expected mapping or sequence") + + return files + + +async def _async_transform_file(file: FileTypes) -> HttpxFileTypes: + if is_file_content(file): + if isinstance(file, os.PathLike): + path = anyio.Path(file) + return (path.name, await path.read_bytes()) + + return file + + if is_tuple_t(file): + return (file[0], await async_read_file_content(file[1]), *file[2:]) + + raise TypeError(f"Expected file types input to be a FileContent type or to be a tuple") + + +async def async_read_file_content(file: FileContent) -> HttpxFileContent: + if isinstance(file, os.PathLike): + return await anyio.Path(file).read_bytes() + + return file diff --git a/src/brapi/_models.py b/src/brapi/_models.py new file mode 100644 index 0000000..6a3cd1d --- /dev/null +++ b/src/brapi/_models.py @@ -0,0 +1,835 @@ +from __future__ import annotations + +import os +import inspect +from typing import TYPE_CHECKING, Any, Type, Union, Generic, TypeVar, Callable, Optional, cast +from datetime import date, datetime +from typing_extensions import ( + List, + Unpack, + Literal, + ClassVar, + Protocol, + Required, + ParamSpec, + TypedDict, + TypeGuard, + final, + override, + runtime_checkable, +) + +import pydantic +from pydantic.fields import FieldInfo + +from ._types import ( + Body, + IncEx, + Query, + ModelT, + Headers, + Timeout, + NotGiven, + AnyMapping, + HttpxRequestFiles, +) +from ._utils import ( + PropertyInfo, + is_list, + is_given, + json_safe, + lru_cache, + is_mapping, + parse_date, + coerce_boolean, + parse_datetime, + strip_not_given, + extract_type_arg, + is_annotated_type, + is_type_alias_type, + strip_annotated_type, +) +from ._compat import ( + PYDANTIC_V1, + ConfigDict, + GenericModel as BaseGenericModel, + get_args, + is_union, + parse_obj, + get_origin, + is_literal_type, + get_model_config, + get_model_fields, + field_get_default, +) 
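+# The `_compat` helpers imported above paper over the differences between Pydantic
+# v1 and v2 so the `BaseModel` defined in this module works against either major
+# version; the version-specific branches below are gated on `PYDANTIC_V1`.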
+from ._constants import RAW_RESPONSE_HEADER + +if TYPE_CHECKING: + from pydantic_core.core_schema import ModelField, ModelSchema, LiteralSchema, ModelFieldsSchema + +__all__ = ["BaseModel", "GenericModel"] + +_T = TypeVar("_T") +_BaseModelT = TypeVar("_BaseModelT", bound="BaseModel") + +P = ParamSpec("P") + + +@runtime_checkable +class _ConfigProtocol(Protocol): + allow_population_by_field_name: bool + + +class BaseModel(pydantic.BaseModel): + if PYDANTIC_V1: + + @property + @override + def model_fields_set(self) -> set[str]: + # a forwards-compat shim for pydantic v2 + return self.__fields_set__ # type: ignore + + class Config(pydantic.BaseConfig): # pyright: ignore[reportDeprecated] + extra: Any = pydantic.Extra.allow # type: ignore + else: + model_config: ClassVar[ConfigDict] = ConfigDict( + extra="allow", defer_build=coerce_boolean(os.environ.get("DEFER_PYDANTIC_BUILD", "true")) + ) + + def to_dict( + self, + *, + mode: Literal["json", "python"] = "python", + use_api_names: bool = True, + exclude_unset: bool = True, + exclude_defaults: bool = False, + exclude_none: bool = False, + warnings: bool = True, + ) -> dict[str, object]: + """Recursively generate a dictionary representation of the model, optionally specifying which fields to include or exclude. + + By default, fields that were not set by the API will not be included, + and keys will match the API response, *not* the property names from the model. + + For example, if the API responds with `"fooBar": true` but we've defined a `foo_bar: bool` property, + the output will use the `"fooBar"` key (unless `use_api_names=False` is passed). + + Args: + mode: + If mode is 'json', the dictionary will only contain JSON serializable types. e.g. `datetime` will be turned into a string, `"2024-3-22T18:11:19.117000Z"`. + If mode is 'python', the dictionary may contain any Python objects. e.g. `datetime(2024, 3, 22)` + + use_api_names: Whether to use the key that the API responded with or the property name. Defaults to `True`. + exclude_unset: Whether to exclude fields that have not been explicitly set. + exclude_defaults: Whether to exclude fields that are set to their default value from the output. + exclude_none: Whether to exclude fields that have a value of `None` from the output. + warnings: Whether to log warnings when invalid fields are encountered. This is only supported in Pydantic v2. + """ + return self.model_dump( + mode=mode, + by_alias=use_api_names, + exclude_unset=exclude_unset, + exclude_defaults=exclude_defaults, + exclude_none=exclude_none, + warnings=warnings, + ) + + def to_json( + self, + *, + indent: int | None = 2, + use_api_names: bool = True, + exclude_unset: bool = True, + exclude_defaults: bool = False, + exclude_none: bool = False, + warnings: bool = True, + ) -> str: + """Generates a JSON string representing this model as it would be received from or sent to the API (but with indentation). + + By default, fields that were not set by the API will not be included, + and keys will match the API response, *not* the property names from the model. + + For example, if the API responds with `"fooBar": true` but we've defined a `foo_bar: bool` property, + the output will use the `"fooBar"` key (unless `use_api_names=False` is passed). + + Args: + indent: Indentation to use in the JSON output. If `None` is passed, the output will be compact. Defaults to `2` + use_api_names: Whether to use the key that the API responded with or the property name. Defaults to `True`. 
+ exclude_unset: Whether to exclude fields that have not been explicitly set. + exclude_defaults: Whether to exclude fields that have the default value. + exclude_none: Whether to exclude fields that have a value of `None`. + warnings: Whether to show any warnings that occurred during serialization. This is only supported in Pydantic v2. + """ + return self.model_dump_json( + indent=indent, + by_alias=use_api_names, + exclude_unset=exclude_unset, + exclude_defaults=exclude_defaults, + exclude_none=exclude_none, + warnings=warnings, + ) + + @override + def __str__(self) -> str: + # mypy complains about an invalid self arg + return f"{self.__repr_name__()}({self.__repr_str__(', ')})" # type: ignore[misc] + + # Override the 'construct' method in a way that supports recursive parsing without validation. + # Based on https://github.com/samuelcolvin/pydantic/issues/1168#issuecomment-817742836. + @classmethod + @override + def construct( # pyright: ignore[reportIncompatibleMethodOverride] + __cls: Type[ModelT], + _fields_set: set[str] | None = None, + **values: object, + ) -> ModelT: + m = __cls.__new__(__cls) + fields_values: dict[str, object] = {} + + config = get_model_config(__cls) + populate_by_name = ( + config.allow_population_by_field_name + if isinstance(config, _ConfigProtocol) + else config.get("populate_by_name") + ) + + if _fields_set is None: + _fields_set = set() + + model_fields = get_model_fields(__cls) + for name, field in model_fields.items(): + key = field.alias + if key is None or (key not in values and populate_by_name): + key = name + + if key in values: + fields_values[name] = _construct_field(value=values[key], field=field, key=key) + _fields_set.add(name) + else: + fields_values[name] = field_get_default(field) + + extra_field_type = _get_extra_fields_type(__cls) + + _extra = {} + for key, value in values.items(): + if key not in model_fields: + parsed = construct_type(value=value, type_=extra_field_type) if extra_field_type is not None else value + + if PYDANTIC_V1: + _fields_set.add(key) + fields_values[key] = parsed + else: + _extra[key] = parsed + + object.__setattr__(m, "__dict__", fields_values) + + if PYDANTIC_V1: + # init_private_attributes() does not exist in v2 + m._init_private_attributes() # type: ignore + + # copied from Pydantic v1's `construct()` method + object.__setattr__(m, "__fields_set__", _fields_set) + else: + # these properties are copied from Pydantic's `model_construct()` method + object.__setattr__(m, "__pydantic_private__", None) + object.__setattr__(m, "__pydantic_extra__", _extra) + object.__setattr__(m, "__pydantic_fields_set__", _fields_set) + + return m + + if not TYPE_CHECKING: + # type checkers incorrectly complain about this assignment + # because the type signatures are technically different + # although not in practice + model_construct = construct + + if PYDANTIC_V1: + # we define aliases for some of the new pydantic v2 methods so + # that we can just document these methods without having to specify + # a specific pydantic version as some users may not know which + # pydantic version they are currently using + + @override + def model_dump( + self, + *, + mode: Literal["json", "python"] | str = "python", + include: IncEx | None = None, + exclude: IncEx | None = None, + by_alias: bool | None = None, + exclude_unset: bool = False, + exclude_defaults: bool = False, + exclude_none: bool = False, + round_trip: bool = False, + warnings: bool | Literal["none", "warn", "error"] = True, + context: dict[str, Any] | None = None, + 
serialize_as_any: bool = False, + fallback: Callable[[Any], Any] | None = None, + ) -> dict[str, Any]: + """Usage docs: https://docs.pydantic.dev/2.4/concepts/serialization/#modelmodel_dump + + Generate a dictionary representation of the model, optionally specifying which fields to include or exclude. + + Args: + mode: The mode in which `to_python` should run. + If mode is 'json', the dictionary will only contain JSON serializable types. + If mode is 'python', the dictionary may contain any Python objects. + include: A list of fields to include in the output. + exclude: A list of fields to exclude from the output. + by_alias: Whether to use the field's alias in the dictionary key if defined. + exclude_unset: Whether to exclude fields that are unset or None from the output. + exclude_defaults: Whether to exclude fields that are set to their default value from the output. + exclude_none: Whether to exclude fields that have a value of `None` from the output. + round_trip: Whether to enable serialization and deserialization round-trip support. + warnings: Whether to log warnings when invalid fields are encountered. + + Returns: + A dictionary representation of the model. + """ + if mode not in {"json", "python"}: + raise ValueError("mode must be either 'json' or 'python'") + if round_trip != False: + raise ValueError("round_trip is only supported in Pydantic v2") + if warnings != True: + raise ValueError("warnings is only supported in Pydantic v2") + if context is not None: + raise ValueError("context is only supported in Pydantic v2") + if serialize_as_any != False: + raise ValueError("serialize_as_any is only supported in Pydantic v2") + if fallback is not None: + raise ValueError("fallback is only supported in Pydantic v2") + dumped = super().dict( # pyright: ignore[reportDeprecated] + include=include, + exclude=exclude, + by_alias=by_alias if by_alias is not None else False, + exclude_unset=exclude_unset, + exclude_defaults=exclude_defaults, + exclude_none=exclude_none, + ) + + return cast("dict[str, Any]", json_safe(dumped)) if mode == "json" else dumped + + @override + def model_dump_json( + self, + *, + indent: int | None = None, + include: IncEx | None = None, + exclude: IncEx | None = None, + by_alias: bool | None = None, + exclude_unset: bool = False, + exclude_defaults: bool = False, + exclude_none: bool = False, + round_trip: bool = False, + warnings: bool | Literal["none", "warn", "error"] = True, + context: dict[str, Any] | None = None, + fallback: Callable[[Any], Any] | None = None, + serialize_as_any: bool = False, + ) -> str: + """Usage docs: https://docs.pydantic.dev/2.4/concepts/serialization/#modelmodel_dump_json + + Generates a JSON representation of the model using Pydantic's `to_json` method. + + Args: + indent: Indentation to use in the JSON output. If None is passed, the output will be compact. + include: Field(s) to include in the JSON output. Can take either a string or set of strings. + exclude: Field(s) to exclude from the JSON output. Can take either a string or set of strings. + by_alias: Whether to serialize using field aliases. + exclude_unset: Whether to exclude fields that have not been explicitly set. + exclude_defaults: Whether to exclude fields that have the default value. + exclude_none: Whether to exclude fields that have a value of `None`. + round_trip: Whether to use serialization/deserialization between JSON and class instance. + warnings: Whether to show any warnings that occurred during serialization. 
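+
+            Note: this override targets Pydantic v1, so the Pydantic-v2-only arguments
+            (`round_trip`, `warnings`, `context`, `fallback` and `serialize_as_any`) must
+            be left at their defaults; passing any other value raises `ValueError`.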
+ + Returns: + A JSON string representation of the model. + """ + if round_trip != False: + raise ValueError("round_trip is only supported in Pydantic v2") + if warnings != True: + raise ValueError("warnings is only supported in Pydantic v2") + if context is not None: + raise ValueError("context is only supported in Pydantic v2") + if serialize_as_any != False: + raise ValueError("serialize_as_any is only supported in Pydantic v2") + if fallback is not None: + raise ValueError("fallback is only supported in Pydantic v2") + return super().json( # type: ignore[reportDeprecated] + indent=indent, + include=include, + exclude=exclude, + by_alias=by_alias if by_alias is not None else False, + exclude_unset=exclude_unset, + exclude_defaults=exclude_defaults, + exclude_none=exclude_none, + ) + + +def _construct_field(value: object, field: FieldInfo, key: str) -> object: + if value is None: + return field_get_default(field) + + if PYDANTIC_V1: + type_ = cast(type, field.outer_type_) # type: ignore + else: + type_ = field.annotation # type: ignore + + if type_ is None: + raise RuntimeError(f"Unexpected field type is None for {key}") + + return construct_type(value=value, type_=type_, metadata=getattr(field, "metadata", None)) + + +def _get_extra_fields_type(cls: type[pydantic.BaseModel]) -> type | None: + if PYDANTIC_V1: + # TODO + return None + + schema = cls.__pydantic_core_schema__ + if schema["type"] == "model": + fields = schema["schema"] + if fields["type"] == "model-fields": + extras = fields.get("extras_schema") + if extras and "cls" in extras: + # mypy can't narrow the type + return extras["cls"] # type: ignore[no-any-return] + + return None + + +def is_basemodel(type_: type) -> bool: + """Returns whether or not the given type is either a `BaseModel` or a union of `BaseModel`""" + if is_union(type_): + for variant in get_args(type_): + if is_basemodel(variant): + return True + + return False + + return is_basemodel_type(type_) + + +def is_basemodel_type(type_: type) -> TypeGuard[type[BaseModel] | type[GenericModel]]: + origin = get_origin(type_) or type_ + if not inspect.isclass(origin): + return False + return issubclass(origin, BaseModel) or issubclass(origin, GenericModel) + + +def build( + base_model_cls: Callable[P, _BaseModelT], + *args: P.args, + **kwargs: P.kwargs, +) -> _BaseModelT: + """Construct a BaseModel class without validation. + + This is useful for cases where you need to instantiate a `BaseModel` + from an API response as this provides type-safe params which isn't supported + by helpers like `construct_type()`. + + ```py + build(MyModel, my_field_a="foo", my_field_b=123) + ``` + """ + if args: + raise TypeError( + "Received positional arguments which are not supported; Keyword arguments must be used instead", + ) + + return cast(_BaseModelT, construct_type(type_=base_model_cls, value=kwargs)) + + +def construct_type_unchecked(*, value: object, type_: type[_T]) -> _T: + """Loose coercion to the expected type with construction of nested values. + + Note: the returned value from this function is not guaranteed to match the + given type. + """ + return cast(_T, construct_type(value=value, type_=type_)) + + +def construct_type(*, value: object, type_: object, metadata: Optional[List[Any]] = None) -> object: + """Loose coercion to the expected type with construction of nested values. + + If the given value does not match the expected type then it is returned as-is. 
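+
+    For example (an illustrative sketch, where `Pet` is any `BaseModel` subclass):
+
+    ```py
+    construct_type(value={"name": "Rex"}, type_=Pet)  # -> a `Pet` instance, built without validation
+    construct_type(value="not-a-number", type_=int)  # -> "not-a-number", returned unchanged
+    ```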
+ """ + + # store a reference to the original type we were given before we extract any inner + # types so that we can properly resolve forward references in `TypeAliasType` annotations + original_type = None + + # we allow `object` as the input type because otherwise, passing things like + # `Literal['value']` will be reported as a type error by type checkers + type_ = cast("type[object]", type_) + if is_type_alias_type(type_): + original_type = type_ # type: ignore[unreachable] + type_ = type_.__value__ # type: ignore[unreachable] + + # unwrap `Annotated[T, ...]` -> `T` + if metadata is not None and len(metadata) > 0: + meta: tuple[Any, ...] = tuple(metadata) + elif is_annotated_type(type_): + meta = get_args(type_)[1:] + type_ = extract_type_arg(type_, 0) + else: + meta = tuple() + + # we need to use the origin class for any types that are subscripted generics + # e.g. Dict[str, object] + origin = get_origin(type_) or type_ + args = get_args(type_) + + if is_union(origin): + try: + return validate_type(type_=cast("type[object]", original_type or type_), value=value) + except Exception: + pass + + # if the type is a discriminated union then we want to construct the right variant + # in the union, even if the data doesn't match exactly, otherwise we'd break code + # that relies on the constructed class types, e.g. + # + # class FooType: + # kind: Literal['foo'] + # value: str + # + # class BarType: + # kind: Literal['bar'] + # value: int + # + # without this block, if the data we get is something like `{'kind': 'bar', 'value': 'foo'}` then + # we'd end up constructing `FooType` when it should be `BarType`. + discriminator = _build_discriminated_union_meta(union=type_, meta_annotations=meta) + if discriminator and is_mapping(value): + variant_value = value.get(discriminator.field_alias_from or discriminator.field_name) + if variant_value and isinstance(variant_value, str): + variant_type = discriminator.mapping.get(variant_value) + if variant_type: + return construct_type(type_=variant_type, value=value) + + # if the data is not valid, use the first variant that doesn't fail while deserializing + for variant in args: + try: + return construct_type(value=value, type_=variant) + except Exception: + continue + + raise RuntimeError(f"Could not convert data into a valid instance of {type_}") + + if origin == dict: + if not is_mapping(value): + return value + + _, items_type = get_args(type_) # Dict[_, items_type] + return {key: construct_type(value=item, type_=items_type) for key, item in value.items()} + + if ( + not is_literal_type(type_) + and inspect.isclass(origin) + and (issubclass(origin, BaseModel) or issubclass(origin, GenericModel)) + ): + if is_list(value): + return [cast(Any, type_).construct(**entry) if is_mapping(entry) else entry for entry in value] + + if is_mapping(value): + if issubclass(type_, BaseModel): + return type_.construct(**value) # type: ignore[arg-type] + + return cast(Any, type_).construct(**value) + + if origin == list: + if not is_list(value): + return value + + inner_type = args[0] # List[inner_type] + return [construct_type(value=entry, type_=inner_type) for entry in value] + + if origin == float: + if isinstance(value, int): + coerced = float(value) + if coerced != value: + return value + return coerced + + return value + + if type_ == datetime: + try: + return parse_datetime(value) # type: ignore + except Exception: + return value + + if type_ == date: + try: + return parse_date(value) # type: ignore + except Exception: + return value + + return value + + 
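+# A discriminated union is typically declared with `Annotated` metadata, e.g. (an
+# illustrative sketch; `Cat` and `Dog` are hypothetical variants that each define a
+# `kind: Literal[...]` field):
+#
+#   Animal = Annotated[Union[Cat, Dog], PropertyInfo(discriminator="kind")]
+#
+# `construct_type()` reads that `PropertyInfo` annotation via
+# `_build_discriminated_union_meta()` below and caches the resulting
+# `DiscriminatorDetails` on the union object as `__discriminator__`.
+
+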
+@runtime_checkable +class CachedDiscriminatorType(Protocol): + __discriminator__: DiscriminatorDetails + + +class DiscriminatorDetails: + field_name: str + """The name of the discriminator field in the variant class, e.g. + + ```py + class Foo(BaseModel): + type: Literal['foo'] + ``` + + Will result in field_name='type' + """ + + field_alias_from: str | None + """The name of the discriminator field in the API response, e.g. + + ```py + class Foo(BaseModel): + type: Literal['foo'] = Field(alias='type_from_api') + ``` + + Will result in field_alias_from='type_from_api' + """ + + mapping: dict[str, type] + """Mapping of discriminator value to variant type, e.g. + + {'foo': FooVariant, 'bar': BarVariant} + """ + + def __init__( + self, + *, + mapping: dict[str, type], + discriminator_field: str, + discriminator_alias: str | None, + ) -> None: + self.mapping = mapping + self.field_name = discriminator_field + self.field_alias_from = discriminator_alias + + +def _build_discriminated_union_meta(*, union: type, meta_annotations: tuple[Any, ...]) -> DiscriminatorDetails | None: + if isinstance(union, CachedDiscriminatorType): + return union.__discriminator__ + + discriminator_field_name: str | None = None + + for annotation in meta_annotations: + if isinstance(annotation, PropertyInfo) and annotation.discriminator is not None: + discriminator_field_name = annotation.discriminator + break + + if not discriminator_field_name: + return None + + mapping: dict[str, type] = {} + discriminator_alias: str | None = None + + for variant in get_args(union): + variant = strip_annotated_type(variant) + if is_basemodel_type(variant): + if PYDANTIC_V1: + field_info = cast("dict[str, FieldInfo]", variant.__fields__).get(discriminator_field_name) # pyright: ignore[reportDeprecated, reportUnnecessaryCast] + if not field_info: + continue + + # Note: if one variant defines an alias then they all should + discriminator_alias = field_info.alias + + if (annotation := getattr(field_info, "annotation", None)) and is_literal_type(annotation): + for entry in get_args(annotation): + if isinstance(entry, str): + mapping[entry] = variant + else: + field = _extract_field_schema_pv2(variant, discriminator_field_name) + if not field: + continue + + # Note: if one variant defines an alias then they all should + discriminator_alias = field.get("serialization_alias") + + field_schema = field["schema"] + + if field_schema["type"] == "literal": + for entry in cast("LiteralSchema", field_schema)["expected"]: + if isinstance(entry, str): + mapping[entry] = variant + + if not mapping: + return None + + details = DiscriminatorDetails( + mapping=mapping, + discriminator_field=discriminator_field_name, + discriminator_alias=discriminator_alias, + ) + cast(CachedDiscriminatorType, union).__discriminator__ = details + return details + + +def _extract_field_schema_pv2(model: type[BaseModel], field_name: str) -> ModelField | None: + schema = model.__pydantic_core_schema__ + if schema["type"] == "definitions": + schema = schema["schema"] + + if schema["type"] != "model": + return None + + schema = cast("ModelSchema", schema) + fields_schema = schema["schema"] + if fields_schema["type"] != "model-fields": + return None + + fields_schema = cast("ModelFieldsSchema", fields_schema) + field = fields_schema["fields"].get(field_name) + if not field: + return None + + return cast("ModelField", field) # pyright: ignore[reportUnnecessaryCast] + + +def validate_type(*, type_: type[_T], value: object) -> _T: + """Strict validation that the given value 
matches the expected type""" + if inspect.isclass(type_) and issubclass(type_, pydantic.BaseModel): + return cast(_T, parse_obj(type_, value)) + + return cast(_T, _validate_non_model_type(type_=type_, value=value)) + + +def set_pydantic_config(typ: Any, config: pydantic.ConfigDict) -> None: + """Add a pydantic config for the given type. + + Note: this is a no-op on Pydantic v1. + """ + setattr(typ, "__pydantic_config__", config) # noqa: B010 + + +# our use of subclassing here causes weirdness for type checkers, +# so we just pretend that we don't subclass +if TYPE_CHECKING: + GenericModel = BaseModel +else: + + class GenericModel(BaseGenericModel, BaseModel): + pass + + +if not PYDANTIC_V1: + from pydantic import TypeAdapter as _TypeAdapter + + _CachedTypeAdapter = cast("TypeAdapter[object]", lru_cache(maxsize=None)(_TypeAdapter)) + + if TYPE_CHECKING: + from pydantic import TypeAdapter + else: + TypeAdapter = _CachedTypeAdapter + + def _validate_non_model_type(*, type_: type[_T], value: object) -> _T: + return TypeAdapter(type_).validate_python(value) + +elif not TYPE_CHECKING: # TODO: condition is weird + + class RootModel(GenericModel, Generic[_T]): + """Used as a placeholder to easily convert runtime types to a Pydantic format + to provide validation. + + For example: + ```py + validated = RootModel[int](__root__="5").__root__ + # validated: 5 + ``` + """ + + __root__: _T + + def _validate_non_model_type(*, type_: type[_T], value: object) -> _T: + model = _create_pydantic_model(type_).validate(value) + return cast(_T, model.__root__) + + def _create_pydantic_model(type_: _T) -> Type[RootModel[_T]]: + return RootModel[type_] # type: ignore + + +class FinalRequestOptionsInput(TypedDict, total=False): + method: Required[str] + url: Required[str] + params: Query + headers: Headers + max_retries: int + timeout: float | Timeout | None + files: HttpxRequestFiles | None + idempotency_key: str + json_data: Body + extra_json: AnyMapping + follow_redirects: bool + + +@final +class FinalRequestOptions(pydantic.BaseModel): + method: str + url: str + params: Query = {} + headers: Union[Headers, NotGiven] = NotGiven() + max_retries: Union[int, NotGiven] = NotGiven() + timeout: Union[float, Timeout, None, NotGiven] = NotGiven() + files: Union[HttpxRequestFiles, None] = None + idempotency_key: Union[str, None] = None + post_parser: Union[Callable[[Any], Any], NotGiven] = NotGiven() + follow_redirects: Union[bool, None] = None + + # It should be noted that we cannot use `json` here as that would override + # a BaseModel method in an incompatible fashion. + json_data: Union[Body, None] = None + extra_json: Union[AnyMapping, None] = None + + if PYDANTIC_V1: + + class Config(pydantic.BaseConfig): # pyright: ignore[reportDeprecated] + arbitrary_types_allowed: bool = True + else: + model_config: ClassVar[ConfigDict] = ConfigDict(arbitrary_types_allowed=True) + + def get_max_retries(self, max_retries: int) -> int: + if isinstance(self.max_retries, NotGiven): + return max_retries + return self.max_retries + + def _strip_raw_response_header(self) -> None: + if not is_given(self.headers): + return + + if self.headers.get(RAW_RESPONSE_HEADER): + self.headers = {**self.headers} + self.headers.pop(RAW_RESPONSE_HEADER) + + # override the `construct` method so that we can run custom transformations. 
+ # this is necessary as we don't want to do any actual runtime type checking + # (which means we can't use validators) but we do want to ensure that `NotGiven` + # values are not present + # + # type ignore required because we're adding explicit types to `**values` + @classmethod + def construct( # type: ignore + cls, + _fields_set: set[str] | None = None, + **values: Unpack[FinalRequestOptionsInput], + ) -> FinalRequestOptions: + kwargs: dict[str, Any] = { + # we unconditionally call `strip_not_given` on any value + # as it will just ignore any non-mapping types + key: strip_not_given(value) + for key, value in values.items() + } + if PYDANTIC_V1: + return cast(FinalRequestOptions, super().construct(_fields_set, **kwargs)) # pyright: ignore[reportDeprecated] + return super().model_construct(_fields_set, **kwargs) + + if not TYPE_CHECKING: + # type checkers incorrectly complain about this assignment + model_construct = construct diff --git a/src/brapi/_qs.py b/src/brapi/_qs.py new file mode 100644 index 0000000..ada6fd3 --- /dev/null +++ b/src/brapi/_qs.py @@ -0,0 +1,150 @@ +from __future__ import annotations + +from typing import Any, List, Tuple, Union, Mapping, TypeVar +from urllib.parse import parse_qs, urlencode +from typing_extensions import Literal, get_args + +from ._types import NotGiven, not_given +from ._utils import flatten + +_T = TypeVar("_T") + + +ArrayFormat = Literal["comma", "repeat", "indices", "brackets"] +NestedFormat = Literal["dots", "brackets"] + +PrimitiveData = Union[str, int, float, bool, None] +# this should be Data = Union[PrimitiveData, "List[Data]", "Tuple[Data]", "Mapping[str, Data]"] +# https://github.com/microsoft/pyright/issues/3555 +Data = Union[PrimitiveData, List[Any], Tuple[Any], "Mapping[str, Any]"] +Params = Mapping[str, Data] + + +class Querystring: + array_format: ArrayFormat + nested_format: NestedFormat + + def __init__( + self, + *, + array_format: ArrayFormat = "repeat", + nested_format: NestedFormat = "brackets", + ) -> None: + self.array_format = array_format + self.nested_format = nested_format + + def parse(self, query: str) -> Mapping[str, object]: + # Note: custom format syntax is not supported yet + return parse_qs(query) + + def stringify( + self, + params: Params, + *, + array_format: ArrayFormat | NotGiven = not_given, + nested_format: NestedFormat | NotGiven = not_given, + ) -> str: + return urlencode( + self.stringify_items( + params, + array_format=array_format, + nested_format=nested_format, + ) + ) + + def stringify_items( + self, + params: Params, + *, + array_format: ArrayFormat | NotGiven = not_given, + nested_format: NestedFormat | NotGiven = not_given, + ) -> list[tuple[str, str]]: + opts = Options( + qs=self, + array_format=array_format, + nested_format=nested_format, + ) + return flatten([self._stringify_item(key, value, opts) for key, value in params.items()]) + + def _stringify_item( + self, + key: str, + value: Data, + opts: Options, + ) -> list[tuple[str, str]]: + if isinstance(value, Mapping): + items: list[tuple[str, str]] = [] + nested_format = opts.nested_format + for subkey, subvalue in value.items(): + items.extend( + self._stringify_item( + # TODO: error if unknown format + f"{key}.{subkey}" if nested_format == "dots" else f"{key}[{subkey}]", + subvalue, + opts, + ) + ) + return items + + if isinstance(value, (list, tuple)): + array_format = opts.array_format + if array_format == "comma": + return [ + ( + key, + ",".join(self._primitive_value_to_str(item) for item in value if item is not None), + ), + ] + elif 
array_format == "repeat": + items = [] + for item in value: + items.extend(self._stringify_item(key, item, opts)) + return items + elif array_format == "indices": + raise NotImplementedError("The array indices format is not supported yet") + elif array_format == "brackets": + items = [] + key = key + "[]" + for item in value: + items.extend(self._stringify_item(key, item, opts)) + return items + else: + raise NotImplementedError( + f"Unknown array_format value: {array_format}, choose from {', '.join(get_args(ArrayFormat))}" + ) + + serialised = self._primitive_value_to_str(value) + if not serialised: + return [] + return [(key, serialised)] + + def _primitive_value_to_str(self, value: PrimitiveData) -> str: + # copied from httpx + if value is True: + return "true" + elif value is False: + return "false" + elif value is None: + return "" + return str(value) + + +_qs = Querystring() +parse = _qs.parse +stringify = _qs.stringify +stringify_items = _qs.stringify_items + + +class Options: + array_format: ArrayFormat + nested_format: NestedFormat + + def __init__( + self, + qs: Querystring = _qs, + *, + array_format: ArrayFormat | NotGiven = not_given, + nested_format: NestedFormat | NotGiven = not_given, + ) -> None: + self.array_format = qs.array_format if isinstance(array_format, NotGiven) else array_format + self.nested_format = qs.nested_format if isinstance(nested_format, NotGiven) else nested_format diff --git a/src/brapi/_resource.py b/src/brapi/_resource.py new file mode 100644 index 0000000..57a841c --- /dev/null +++ b/src/brapi/_resource.py @@ -0,0 +1,43 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +import time +from typing import TYPE_CHECKING + +import anyio + +if TYPE_CHECKING: + from ._client import Brapi, AsyncBrapi + + +class SyncAPIResource: + _client: Brapi + + def __init__(self, client: Brapi) -> None: + self._client = client + self._get = client.get + self._post = client.post + self._patch = client.patch + self._put = client.put + self._delete = client.delete + self._get_api_list = client.get_api_list + + def _sleep(self, seconds: float) -> None: + time.sleep(seconds) + + +class AsyncAPIResource: + _client: AsyncBrapi + + def __init__(self, client: AsyncBrapi) -> None: + self._client = client + self._get = client.get + self._post = client.post + self._patch = client.patch + self._put = client.put + self._delete = client.delete + self._get_api_list = client.get_api_list + + async def _sleep(self, seconds: float) -> None: + await anyio.sleep(seconds) diff --git a/src/brapi/_response.py b/src/brapi/_response.py new file mode 100644 index 0000000..0f7b71f --- /dev/null +++ b/src/brapi/_response.py @@ -0,0 +1,830 @@ +from __future__ import annotations + +import os +import inspect +import logging +import datetime +import functools +from types import TracebackType +from typing import ( + TYPE_CHECKING, + Any, + Union, + Generic, + TypeVar, + Callable, + Iterator, + AsyncIterator, + cast, + overload, +) +from typing_extensions import Awaitable, ParamSpec, override, get_origin + +import anyio +import httpx +import pydantic + +from ._types import NoneType +from ._utils import is_given, extract_type_arg, is_annotated_type, is_type_alias_type, extract_type_var_from_base +from ._models import BaseModel, is_basemodel +from ._constants import RAW_RESPONSE_HEADER, OVERRIDE_CAST_TO_HEADER +from ._streaming import Stream, AsyncStream, is_stream_class_type, extract_stream_chunk_type +from ._exceptions import 
BrapiError, APIResponseValidationError + +if TYPE_CHECKING: + from ._models import FinalRequestOptions + from ._base_client import BaseClient + + +P = ParamSpec("P") +R = TypeVar("R") +_T = TypeVar("_T") +_APIResponseT = TypeVar("_APIResponseT", bound="APIResponse[Any]") +_AsyncAPIResponseT = TypeVar("_AsyncAPIResponseT", bound="AsyncAPIResponse[Any]") + +log: logging.Logger = logging.getLogger(__name__) + + +class BaseAPIResponse(Generic[R]): + _cast_to: type[R] + _client: BaseClient[Any, Any] + _parsed_by_type: dict[type[Any], Any] + _is_sse_stream: bool + _stream_cls: type[Stream[Any]] | type[AsyncStream[Any]] | None + _options: FinalRequestOptions + + http_response: httpx.Response + + retries_taken: int + """The number of retries made. If no retries happened this will be `0`""" + + def __init__( + self, + *, + raw: httpx.Response, + cast_to: type[R], + client: BaseClient[Any, Any], + stream: bool, + stream_cls: type[Stream[Any]] | type[AsyncStream[Any]] | None, + options: FinalRequestOptions, + retries_taken: int = 0, + ) -> None: + self._cast_to = cast_to + self._client = client + self._parsed_by_type = {} + self._is_sse_stream = stream + self._stream_cls = stream_cls + self._options = options + self.http_response = raw + self.retries_taken = retries_taken + + @property + def headers(self) -> httpx.Headers: + return self.http_response.headers + + @property + def http_request(self) -> httpx.Request: + """Returns the httpx Request instance associated with the current response.""" + return self.http_response.request + + @property + def status_code(self) -> int: + return self.http_response.status_code + + @property + def url(self) -> httpx.URL: + """Returns the URL for which the request was made.""" + return self.http_response.url + + @property + def method(self) -> str: + return self.http_request.method + + @property + def http_version(self) -> str: + return self.http_response.http_version + + @property + def elapsed(self) -> datetime.timedelta: + """The time taken for the complete request/response cycle to complete.""" + return self.http_response.elapsed + + @property + def is_closed(self) -> bool: + """Whether or not the response body has been closed. + + If this is False then there is response data that has not been read yet. + You must either fully consume the response body or call `.close()` + before discarding the response to prevent resource leaks. + """ + return self.http_response.is_closed + + @override + def __repr__(self) -> str: + return ( + f"<{self.__class__.__name__} [{self.status_code} {self.http_response.reason_phrase}] type={self._cast_to}>" + ) + + def _parse(self, *, to: type[_T] | None = None) -> R | _T: + cast_to = to if to is not None else self._cast_to + + # unwrap `TypeAlias('Name', T)` -> `T` + if is_type_alias_type(cast_to): + cast_to = cast_to.__value__ # type: ignore[unreachable] + + # unwrap `Annotated[T, ...]` -> `T` + if cast_to and is_annotated_type(cast_to): + cast_to = extract_type_arg(cast_to, 0) + + origin = get_origin(cast_to) or cast_to + + if self._is_sse_stream: + if to: + if not is_stream_class_type(to): + raise TypeError(f"Expected custom parse type to be a subclass of {Stream} or {AsyncStream}") + + return cast( + _T, + to( + cast_to=extract_stream_chunk_type( + to, + failure_message="Expected custom stream type to be passed with a type argument, e.g. 
Stream[ChunkType]", + ), + response=self.http_response, + client=cast(Any, self._client), + ), + ) + + if self._stream_cls: + return cast( + R, + self._stream_cls( + cast_to=extract_stream_chunk_type(self._stream_cls), + response=self.http_response, + client=cast(Any, self._client), + ), + ) + + stream_cls = cast("type[Stream[Any]] | type[AsyncStream[Any]] | None", self._client._default_stream_cls) + if stream_cls is None: + raise MissingStreamClassError() + + return cast( + R, + stream_cls( + cast_to=cast_to, + response=self.http_response, + client=cast(Any, self._client), + ), + ) + + if cast_to is NoneType: + return cast(R, None) + + response = self.http_response + if cast_to == str: + return cast(R, response.text) + + if cast_to == bytes: + return cast(R, response.content) + + if cast_to == int: + return cast(R, int(response.text)) + + if cast_to == float: + return cast(R, float(response.text)) + + if cast_to == bool: + return cast(R, response.text.lower() == "true") + + if origin == APIResponse: + raise RuntimeError("Unexpected state - cast_to is `APIResponse`") + + if inspect.isclass(origin) and issubclass(origin, httpx.Response): + # Because of the invariance of our ResponseT TypeVar, users can subclass httpx.Response + # and pass that class to our request functions. We cannot change the variance to be either + # covariant or contravariant as that makes our usage of ResponseT illegal. We could construct + # the response class ourselves but that is something that should be supported directly in httpx + # as it would be easy to incorrectly construct the Response object due to the multitude of arguments. + if cast_to != httpx.Response: + raise ValueError(f"Subclasses of httpx.Response cannot be passed to `cast_to`") + return cast(R, response) + + if ( + inspect.isclass( + origin # pyright: ignore[reportUnknownArgumentType] + ) + and not issubclass(origin, BaseModel) + and issubclass(origin, pydantic.BaseModel) + ): + raise TypeError("Pydantic models must subclass our base model type, e.g. `from brapi import BaseModel`") + + if ( + cast_to is not object + and not origin is list + and not origin is dict + and not origin is Union + and not issubclass(origin, BaseModel) + ): + raise RuntimeError( + f"Unsupported type, expected {cast_to} to be a subclass of {BaseModel}, {dict}, {list}, {Union}, {NoneType}, {str} or {httpx.Response}." + ) + + # split is required to handle cases where additional information is included + # in the response, e.g. application/json; charset=utf-8 + content_type, *_ = response.headers.get("content-type", "*").split(";") + if not content_type.endswith("json"): + if is_basemodel(cast_to): + try: + data = response.json() + except Exception as exc: + log.debug("Could not read JSON from response data due to %s - %s", type(exc), exc) + else: + return self._client._process_response_data( + data=data, + cast_to=cast_to, # type: ignore + response=response, + ) + + if self._client._strict_response_validation: + raise APIResponseValidationError( + response=response, + message=f"Expected Content-Type response header to be `application/json` but received `{content_type}` instead.", + body=response.text, + ) + + # If the API responds with content that isn't JSON then we just return + # the (decoded) text without performing any parsing so that you can still + # handle the response however you need to. 
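+            #
+            # e.g. a caller that knows an endpoint returns CSV could decode the text
+            # itself (a hypothetical caller-side sketch, `text` being the value returned
+            # below):
+            #
+            #   import csv, io
+            #
+            #   rows = list(csv.reader(io.StringIO(text)))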
+ return response.text # type: ignore + + data = response.json() + + return self._client._process_response_data( + data=data, + cast_to=cast_to, # type: ignore + response=response, + ) + + +class APIResponse(BaseAPIResponse[R]): + @overload + def parse(self, *, to: type[_T]) -> _T: ... + + @overload + def parse(self) -> R: ... + + def parse(self, *, to: type[_T] | None = None) -> R | _T: + """Returns the rich python representation of this response's data. + + For lower-level control, see `.read()`, `.json()`, `.iter_bytes()`. + + You can customise the type that the response is parsed into through + the `to` argument, e.g. + + ```py + from brapi import BaseModel + + + class MyModel(BaseModel): + foo: str + + + obj = response.parse(to=MyModel) + print(obj.foo) + ``` + + We support parsing: + - `BaseModel` + - `dict` + - `list` + - `Union` + - `str` + - `int` + - `float` + - `httpx.Response` + """ + cache_key = to if to is not None else self._cast_to + cached = self._parsed_by_type.get(cache_key) + if cached is not None: + return cached # type: ignore[no-any-return] + + if not self._is_sse_stream: + self.read() + + parsed = self._parse(to=to) + if is_given(self._options.post_parser): + parsed = self._options.post_parser(parsed) + + self._parsed_by_type[cache_key] = parsed + return parsed + + def read(self) -> bytes: + """Read and return the binary response content.""" + try: + return self.http_response.read() + except httpx.StreamConsumed as exc: + # The default error raised by httpx isn't very + # helpful in our case so we re-raise it with + # a different error message. + raise StreamAlreadyConsumed() from exc + + def text(self) -> str: + """Read and decode the response content into a string.""" + self.read() + return self.http_response.text + + def json(self) -> object: + """Read and decode the JSON response content.""" + self.read() + return self.http_response.json() + + def close(self) -> None: + """Close the response and release the connection. + + Automatically called if the response body is read to completion. + """ + self.http_response.close() + + def iter_bytes(self, chunk_size: int | None = None) -> Iterator[bytes]: + """ + A byte-iterator over the decoded response content. + + This automatically handles gzip, deflate and brotli encoded responses. + """ + for chunk in self.http_response.iter_bytes(chunk_size): + yield chunk + + def iter_text(self, chunk_size: int | None = None) -> Iterator[str]: + """A str-iterator over the decoded response content + that handles both gzip, deflate, etc but also detects the content's + string encoding. + """ + for chunk in self.http_response.iter_text(chunk_size): + yield chunk + + def iter_lines(self) -> Iterator[str]: + """Like `iter_text()` but will only yield chunks for each line""" + for chunk in self.http_response.iter_lines(): + yield chunk + + +class AsyncAPIResponse(BaseAPIResponse[R]): + @overload + async def parse(self, *, to: type[_T]) -> _T: ... + + @overload + async def parse(self) -> R: ... + + async def parse(self, *, to: type[_T] | None = None) -> R | _T: + """Returns the rich python representation of this response's data. + + For lower-level control, see `.read()`, `.json()`, `.iter_bytes()`. + + You can customise the type that the response is parsed into through + the `to` argument, e.g. 
+ + ```py + from brapi import BaseModel + + + class MyModel(BaseModel): + foo: str + + + obj = response.parse(to=MyModel) + print(obj.foo) + ``` + + We support parsing: + - `BaseModel` + - `dict` + - `list` + - `Union` + - `str` + - `httpx.Response` + """ + cache_key = to if to is not None else self._cast_to + cached = self._parsed_by_type.get(cache_key) + if cached is not None: + return cached # type: ignore[no-any-return] + + if not self._is_sse_stream: + await self.read() + + parsed = self._parse(to=to) + if is_given(self._options.post_parser): + parsed = self._options.post_parser(parsed) + + self._parsed_by_type[cache_key] = parsed + return parsed + + async def read(self) -> bytes: + """Read and return the binary response content.""" + try: + return await self.http_response.aread() + except httpx.StreamConsumed as exc: + # the default error raised by httpx isn't very + # helpful in our case so we re-raise it with + # a different error message + raise StreamAlreadyConsumed() from exc + + async def text(self) -> str: + """Read and decode the response content into a string.""" + await self.read() + return self.http_response.text + + async def json(self) -> object: + """Read and decode the JSON response content.""" + await self.read() + return self.http_response.json() + + async def close(self) -> None: + """Close the response and release the connection. + + Automatically called if the response body is read to completion. + """ + await self.http_response.aclose() + + async def iter_bytes(self, chunk_size: int | None = None) -> AsyncIterator[bytes]: + """ + A byte-iterator over the decoded response content. + + This automatically handles gzip, deflate and brotli encoded responses. + """ + async for chunk in self.http_response.aiter_bytes(chunk_size): + yield chunk + + async def iter_text(self, chunk_size: int | None = None) -> AsyncIterator[str]: + """A str-iterator over the decoded response content + that handles both gzip, deflate, etc but also detects the content's + string encoding. + """ + async for chunk in self.http_response.aiter_text(chunk_size): + yield chunk + + async def iter_lines(self) -> AsyncIterator[str]: + """Like `iter_text()` but will only yield chunks for each line""" + async for chunk in self.http_response.aiter_lines(): + yield chunk + + +class BinaryAPIResponse(APIResponse[bytes]): + """Subclass of APIResponse providing helpers for dealing with binary data. + + Note: If you want to stream the response data instead of eagerly reading it + all at once then you should use `.with_streaming_response` when making + the API request, e.g. `.with_streaming_response.get_binary_response()` + """ + + def write_to_file( + self, + file: str | os.PathLike[str], + ) -> None: + """Write the output to the given file. + + Accepts a filename or any path-like object, e.g. pathlib.Path + + Note: if you want to stream the data to the file instead of writing + all at once then you should use `.with_streaming_response` when making + the API request, e.g. `.with_streaming_response.get_binary_response()` + """ + with open(file, mode="wb") as f: + for data in self.iter_bytes(): + f.write(data) + + +class AsyncBinaryAPIResponse(AsyncAPIResponse[bytes]): + """Subclass of APIResponse providing helpers for dealing with binary data. + + Note: If you want to stream the response data instead of eagerly reading it + all at once then you should use `.with_streaming_response` when making + the API request, e.g. 
`.with_streaming_response.get_binary_response()`
+    """
+
+    async def write_to_file(
+        self,
+        file: str | os.PathLike[str],
+    ) -> None:
+        """Write the output to the given file.
+
+        Accepts a filename or any path-like object, e.g. pathlib.Path
+
+        Note: if you want to stream the data to the file instead of writing
+        all at once then you should use `.with_streaming_response` when making
+        the API request, e.g. `.with_streaming_response.get_binary_response()`
+        """
+        path = anyio.Path(file)
+        async with await path.open(mode="wb") as f:
+            async for data in self.iter_bytes():
+                await f.write(data)
+
+
+class StreamedBinaryAPIResponse(APIResponse[bytes]):
+    def stream_to_file(
+        self,
+        file: str | os.PathLike[str],
+        *,
+        chunk_size: int | None = None,
+    ) -> None:
+        """Streams the output to the given file.
+
+        Accepts a filename or any path-like object, e.g. pathlib.Path
+        """
+        with open(file, mode="wb") as f:
+            for data in self.iter_bytes(chunk_size):
+                f.write(data)
+
+
+class AsyncStreamedBinaryAPIResponse(AsyncAPIResponse[bytes]):
+    async def stream_to_file(
+        self,
+        file: str | os.PathLike[str],
+        *,
+        chunk_size: int | None = None,
+    ) -> None:
+        """Streams the output to the given file.
+
+        Accepts a filename or any path-like object, e.g. pathlib.Path
+        """
+        path = anyio.Path(file)
+        async with await path.open(mode="wb") as f:
+            async for data in self.iter_bytes(chunk_size):
+                await f.write(data)
+
+
+class MissingStreamClassError(TypeError):
+    def __init__(self) -> None:
+        super().__init__(
+            "The `stream` argument was set to `True` but the `stream_cls` argument was not given. See `brapi._streaming` for reference",
+        )
+
+
+class StreamAlreadyConsumed(BrapiError):
+    """
+    Attempted to read or stream content, but the content has already
+    been streamed.
+
+    This can happen if you use a method like `.iter_lines()` and then attempt
+    to read the entire response body afterwards, e.g.
+
+    ```py
+    response = await client.post(...)
+    async for line in response.iter_lines():
+        ...  # do something with `line`
+
+    content = await response.read()
+    # ^ error
+    ```
+
+    If you want this behaviour you'll need to either manually accumulate the response
+    content or call `await response.read()` before iterating over the stream.
+    """
+
+    def __init__(self) -> None:
+        message = (
+            "Attempted to read or stream some content, but the content has "
+            "already been streamed. "
+            "This could be due to attempting to stream the response "
+            "content more than once."
+            "\n\n"
+            "You can fix this by manually accumulating the response content while streaming "
+            "or by calling `.read()` before starting to stream."
+ ) + super().__init__(message) + + +class ResponseContextManager(Generic[_APIResponseT]): + """Context manager for ensuring that a request is not made + until it is entered and that the response will always be closed + when the context manager exits + """ + + def __init__(self, request_func: Callable[[], _APIResponseT]) -> None: + self._request_func = request_func + self.__response: _APIResponseT | None = None + + def __enter__(self) -> _APIResponseT: + self.__response = self._request_func() + return self.__response + + def __exit__( + self, + exc_type: type[BaseException] | None, + exc: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + if self.__response is not None: + self.__response.close() + + +class AsyncResponseContextManager(Generic[_AsyncAPIResponseT]): + """Context manager for ensuring that a request is not made + until it is entered and that the response will always be closed + when the context manager exits + """ + + def __init__(self, api_request: Awaitable[_AsyncAPIResponseT]) -> None: + self._api_request = api_request + self.__response: _AsyncAPIResponseT | None = None + + async def __aenter__(self) -> _AsyncAPIResponseT: + self.__response = await self._api_request + return self.__response + + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + if self.__response is not None: + await self.__response.close() + + +def to_streamed_response_wrapper(func: Callable[P, R]) -> Callable[P, ResponseContextManager[APIResponse[R]]]: + """Higher order function that takes one of our bound API methods and wraps it + to support streaming and returning the raw `APIResponse` object directly. + """ + + @functools.wraps(func) + def wrapped(*args: P.args, **kwargs: P.kwargs) -> ResponseContextManager[APIResponse[R]]: + extra_headers: dict[str, str] = {**(cast(Any, kwargs.get("extra_headers")) or {})} + extra_headers[RAW_RESPONSE_HEADER] = "stream" + + kwargs["extra_headers"] = extra_headers + + make_request = functools.partial(func, *args, **kwargs) + + return ResponseContextManager(cast(Callable[[], APIResponse[R]], make_request)) + + return wrapped + + +def async_to_streamed_response_wrapper( + func: Callable[P, Awaitable[R]], +) -> Callable[P, AsyncResponseContextManager[AsyncAPIResponse[R]]]: + """Higher order function that takes one of our bound API methods and wraps it + to support streaming and returning the raw `APIResponse` object directly. + """ + + @functools.wraps(func) + def wrapped(*args: P.args, **kwargs: P.kwargs) -> AsyncResponseContextManager[AsyncAPIResponse[R]]: + extra_headers: dict[str, str] = {**(cast(Any, kwargs.get("extra_headers")) or {})} + extra_headers[RAW_RESPONSE_HEADER] = "stream" + + kwargs["extra_headers"] = extra_headers + + make_request = func(*args, **kwargs) + + return AsyncResponseContextManager(cast(Awaitable[AsyncAPIResponse[R]], make_request)) + + return wrapped + + +def to_custom_streamed_response_wrapper( + func: Callable[P, object], + response_cls: type[_APIResponseT], +) -> Callable[P, ResponseContextManager[_APIResponseT]]: + """Higher order function that takes one of our bound API methods and an `APIResponse` class + and wraps the method to support streaming and returning the given response class directly. + + Note: the given `response_cls` *must* be concrete, e.g. 
`class BinaryAPIResponse(APIResponse[bytes])` + """ + + @functools.wraps(func) + def wrapped(*args: P.args, **kwargs: P.kwargs) -> ResponseContextManager[_APIResponseT]: + extra_headers: dict[str, Any] = {**(cast(Any, kwargs.get("extra_headers")) or {})} + extra_headers[RAW_RESPONSE_HEADER] = "stream" + extra_headers[OVERRIDE_CAST_TO_HEADER] = response_cls + + kwargs["extra_headers"] = extra_headers + + make_request = functools.partial(func, *args, **kwargs) + + return ResponseContextManager(cast(Callable[[], _APIResponseT], make_request)) + + return wrapped + + +def async_to_custom_streamed_response_wrapper( + func: Callable[P, Awaitable[object]], + response_cls: type[_AsyncAPIResponseT], +) -> Callable[P, AsyncResponseContextManager[_AsyncAPIResponseT]]: + """Higher order function that takes one of our bound API methods and an `APIResponse` class + and wraps the method to support streaming and returning the given response class directly. + + Note: the given `response_cls` *must* be concrete, e.g. `class BinaryAPIResponse(APIResponse[bytes])` + """ + + @functools.wraps(func) + def wrapped(*args: P.args, **kwargs: P.kwargs) -> AsyncResponseContextManager[_AsyncAPIResponseT]: + extra_headers: dict[str, Any] = {**(cast(Any, kwargs.get("extra_headers")) or {})} + extra_headers[RAW_RESPONSE_HEADER] = "stream" + extra_headers[OVERRIDE_CAST_TO_HEADER] = response_cls + + kwargs["extra_headers"] = extra_headers + + make_request = func(*args, **kwargs) + + return AsyncResponseContextManager(cast(Awaitable[_AsyncAPIResponseT], make_request)) + + return wrapped + + +def to_raw_response_wrapper(func: Callable[P, R]) -> Callable[P, APIResponse[R]]: + """Higher order function that takes one of our bound API methods and wraps it + to support returning the raw `APIResponse` object directly. + """ + + @functools.wraps(func) + def wrapped(*args: P.args, **kwargs: P.kwargs) -> APIResponse[R]: + extra_headers: dict[str, str] = {**(cast(Any, kwargs.get("extra_headers")) or {})} + extra_headers[RAW_RESPONSE_HEADER] = "raw" + + kwargs["extra_headers"] = extra_headers + + return cast(APIResponse[R], func(*args, **kwargs)) + + return wrapped + + +def async_to_raw_response_wrapper(func: Callable[P, Awaitable[R]]) -> Callable[P, Awaitable[AsyncAPIResponse[R]]]: + """Higher order function that takes one of our bound API methods and wraps it + to support returning the raw `APIResponse` object directly. + """ + + @functools.wraps(func) + async def wrapped(*args: P.args, **kwargs: P.kwargs) -> AsyncAPIResponse[R]: + extra_headers: dict[str, str] = {**(cast(Any, kwargs.get("extra_headers")) or {})} + extra_headers[RAW_RESPONSE_HEADER] = "raw" + + kwargs["extra_headers"] = extra_headers + + return cast(AsyncAPIResponse[R], await func(*args, **kwargs)) + + return wrapped + + +def to_custom_raw_response_wrapper( + func: Callable[P, object], + response_cls: type[_APIResponseT], +) -> Callable[P, _APIResponseT]: + """Higher order function that takes one of our bound API methods and an `APIResponse` class + and wraps the method to support returning the given response class directly. + + Note: the given `response_cls` *must* be concrete, e.g. 
`class BinaryAPIResponse(APIResponse[bytes])` + """ + + @functools.wraps(func) + def wrapped(*args: P.args, **kwargs: P.kwargs) -> _APIResponseT: + extra_headers: dict[str, Any] = {**(cast(Any, kwargs.get("extra_headers")) or {})} + extra_headers[RAW_RESPONSE_HEADER] = "raw" + extra_headers[OVERRIDE_CAST_TO_HEADER] = response_cls + + kwargs["extra_headers"] = extra_headers + + return cast(_APIResponseT, func(*args, **kwargs)) + + return wrapped + + +def async_to_custom_raw_response_wrapper( + func: Callable[P, Awaitable[object]], + response_cls: type[_AsyncAPIResponseT], +) -> Callable[P, Awaitable[_AsyncAPIResponseT]]: + """Higher order function that takes one of our bound API methods and an `APIResponse` class + and wraps the method to support returning the given response class directly. + + Note: the given `response_cls` *must* be concrete, e.g. `class BinaryAPIResponse(APIResponse[bytes])` + """ + + @functools.wraps(func) + def wrapped(*args: P.args, **kwargs: P.kwargs) -> Awaitable[_AsyncAPIResponseT]: + extra_headers: dict[str, Any] = {**(cast(Any, kwargs.get("extra_headers")) or {})} + extra_headers[RAW_RESPONSE_HEADER] = "raw" + extra_headers[OVERRIDE_CAST_TO_HEADER] = response_cls + + kwargs["extra_headers"] = extra_headers + + return cast(Awaitable[_AsyncAPIResponseT], func(*args, **kwargs)) + + return wrapped + + +def extract_response_type(typ: type[BaseAPIResponse[Any]]) -> type: + """Given a type like `APIResponse[T]`, returns the generic type variable `T`. + + This also handles the case where a concrete subclass is given, e.g. + ```py + class MyResponse(APIResponse[bytes]): + ... + + extract_response_type(MyResponse) -> bytes + ``` + """ + return extract_type_var_from_base( + typ, + generic_bases=cast("tuple[type, ...]", (BaseAPIResponse, APIResponse, AsyncAPIResponse)), + index=0, + ) diff --git a/src/brapi/_streaming.py b/src/brapi/_streaming.py new file mode 100644 index 0000000..b9d6d1c --- /dev/null +++ b/src/brapi/_streaming.py @@ -0,0 +1,333 @@ +# Note: initially copied from https://github.com/florimondmanca/httpx-sse/blob/master/src/httpx_sse/_decoders.py +from __future__ import annotations + +import json +import inspect +from types import TracebackType +from typing import TYPE_CHECKING, Any, Generic, TypeVar, Iterator, AsyncIterator, cast +from typing_extensions import Self, Protocol, TypeGuard, override, get_origin, runtime_checkable + +import httpx + +from ._utils import extract_type_var_from_base + +if TYPE_CHECKING: + from ._client import Brapi, AsyncBrapi + + +_T = TypeVar("_T") + + +class Stream(Generic[_T]): + """Provides the core interface to iterate over a synchronous stream response.""" + + response: httpx.Response + + _decoder: SSEBytesDecoder + + def __init__( + self, + *, + cast_to: type[_T], + response: httpx.Response, + client: Brapi, + ) -> None: + self.response = response + self._cast_to = cast_to + self._client = client + self._decoder = client._make_sse_decoder() + self._iterator = self.__stream__() + + def __next__(self) -> _T: + return self._iterator.__next__() + + def __iter__(self) -> Iterator[_T]: + for item in self._iterator: + yield item + + def _iter_events(self) -> Iterator[ServerSentEvent]: + yield from self._decoder.iter_bytes(self.response.iter_bytes()) + + def __stream__(self) -> Iterator[_T]: + cast_to = cast(Any, self._cast_to) + response = self.response + process_data = self._client._process_response_data + iterator = self._iter_events() + + for sse in iterator: + yield process_data(data=sse.json(), cast_to=cast_to, 
response=response) + + # Ensure the entire stream is consumed + for _sse in iterator: + ... + + def __enter__(self) -> Self: + return self + + def __exit__( + self, + exc_type: type[BaseException] | None, + exc: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + self.close() + + def close(self) -> None: + """ + Close the response and release the connection. + + Automatically called if the response body is read to completion. + """ + self.response.close() + + +class AsyncStream(Generic[_T]): + """Provides the core interface to iterate over an asynchronous stream response.""" + + response: httpx.Response + + _decoder: SSEDecoder | SSEBytesDecoder + + def __init__( + self, + *, + cast_to: type[_T], + response: httpx.Response, + client: AsyncBrapi, + ) -> None: + self.response = response + self._cast_to = cast_to + self._client = client + self._decoder = client._make_sse_decoder() + self._iterator = self.__stream__() + + async def __anext__(self) -> _T: + return await self._iterator.__anext__() + + async def __aiter__(self) -> AsyncIterator[_T]: + async for item in self._iterator: + yield item + + async def _iter_events(self) -> AsyncIterator[ServerSentEvent]: + async for sse in self._decoder.aiter_bytes(self.response.aiter_bytes()): + yield sse + + async def __stream__(self) -> AsyncIterator[_T]: + cast_to = cast(Any, self._cast_to) + response = self.response + process_data = self._client._process_response_data + iterator = self._iter_events() + + async for sse in iterator: + yield process_data(data=sse.json(), cast_to=cast_to, response=response) + + # Ensure the entire stream is consumed + async for _sse in iterator: + ... + + async def __aenter__(self) -> Self: + return self + + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + await self.close() + + async def close(self) -> None: + """ + Close the response and release the connection. + + Automatically called if the response body is read to completion. 
+ """ + await self.response.aclose() + + +class ServerSentEvent: + def __init__( + self, + *, + event: str | None = None, + data: str | None = None, + id: str | None = None, + retry: int | None = None, + ) -> None: + if data is None: + data = "" + + self._id = id + self._data = data + self._event = event or None + self._retry = retry + + @property + def event(self) -> str | None: + return self._event + + @property + def id(self) -> str | None: + return self._id + + @property + def retry(self) -> int | None: + return self._retry + + @property + def data(self) -> str: + return self._data + + def json(self) -> Any: + return json.loads(self.data) + + @override + def __repr__(self) -> str: + return f"ServerSentEvent(event={self.event}, data={self.data}, id={self.id}, retry={self.retry})" + + +class SSEDecoder: + _data: list[str] + _event: str | None + _retry: int | None + _last_event_id: str | None + + def __init__(self) -> None: + self._event = None + self._data = [] + self._last_event_id = None + self._retry = None + + def iter_bytes(self, iterator: Iterator[bytes]) -> Iterator[ServerSentEvent]: + """Given an iterator that yields raw binary data, iterate over it & yield every event encountered""" + for chunk in self._iter_chunks(iterator): + # Split before decoding so splitlines() only uses \r and \n + for raw_line in chunk.splitlines(): + line = raw_line.decode("utf-8") + sse = self.decode(line) + if sse: + yield sse + + def _iter_chunks(self, iterator: Iterator[bytes]) -> Iterator[bytes]: + """Given an iterator that yields raw binary data, iterate over it and yield individual SSE chunks""" + data = b"" + for chunk in iterator: + for line in chunk.splitlines(keepends=True): + data += line + if data.endswith((b"\r\r", b"\n\n", b"\r\n\r\n")): + yield data + data = b"" + if data: + yield data + + async def aiter_bytes(self, iterator: AsyncIterator[bytes]) -> AsyncIterator[ServerSentEvent]: + """Given an iterator that yields raw binary data, iterate over it & yield every event encountered""" + async for chunk in self._aiter_chunks(iterator): + # Split before decoding so splitlines() only uses \r and \n + for raw_line in chunk.splitlines(): + line = raw_line.decode("utf-8") + sse = self.decode(line) + if sse: + yield sse + + async def _aiter_chunks(self, iterator: AsyncIterator[bytes]) -> AsyncIterator[bytes]: + """Given an iterator that yields raw binary data, iterate over it and yield individual SSE chunks""" + data = b"" + async for chunk in iterator: + for line in chunk.splitlines(keepends=True): + data += line + if data.endswith((b"\r\r", b"\n\n", b"\r\n\r\n")): + yield data + data = b"" + if data: + yield data + + def decode(self, line: str) -> ServerSentEvent | None: + # See: https://html.spec.whatwg.org/multipage/server-sent-events.html#event-stream-interpretation # noqa: E501 + + if not line: + if not self._event and not self._data and not self._last_event_id and self._retry is None: + return None + + sse = ServerSentEvent( + event=self._event, + data="\n".join(self._data), + id=self._last_event_id, + retry=self._retry, + ) + + # NOTE: as per the SSE spec, do not reset last_event_id. 
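+            # the event type, data buffer and retry interval are cleared so that the
+            # next event starts from a clean slate, while `self._last_event_id` persists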
+ self._event = None + self._data = [] + self._retry = None + + return sse + + if line.startswith(":"): + return None + + fieldname, _, value = line.partition(":") + + if value.startswith(" "): + value = value[1:] + + if fieldname == "event": + self._event = value + elif fieldname == "data": + self._data.append(value) + elif fieldname == "id": + if "\0" in value: + pass + else: + self._last_event_id = value + elif fieldname == "retry": + try: + self._retry = int(value) + except (TypeError, ValueError): + pass + else: + pass # Field is ignored. + + return None + + +@runtime_checkable +class SSEBytesDecoder(Protocol): + def iter_bytes(self, iterator: Iterator[bytes]) -> Iterator[ServerSentEvent]: + """Given an iterator that yields raw binary data, iterate over it & yield every event encountered""" + ... + + def aiter_bytes(self, iterator: AsyncIterator[bytes]) -> AsyncIterator[ServerSentEvent]: + """Given an async iterator that yields raw binary data, iterate over it & yield every event encountered""" + ... + + +def is_stream_class_type(typ: type) -> TypeGuard[type[Stream[object]] | type[AsyncStream[object]]]: + """TypeGuard for determining whether or not the given type is a subclass of `Stream` / `AsyncStream`""" + origin = get_origin(typ) or typ + return inspect.isclass(origin) and issubclass(origin, (Stream, AsyncStream)) + + +def extract_stream_chunk_type( + stream_cls: type, + *, + failure_message: str | None = None, +) -> type: + """Given a type like `Stream[T]`, returns the generic type variable `T`. + + This also handles the case where a concrete subclass is given, e.g. + ```py + class MyStream(Stream[bytes]): + ... + + extract_stream_chunk_type(MyStream) -> bytes + ``` + """ + from ._base_client import Stream, AsyncStream + + return extract_type_var_from_base( + stream_cls, + index=0, + generic_bases=cast("tuple[type, ...]", (Stream, AsyncStream)), + failure_message=failure_message, + ) diff --git a/src/brapi/_types.py b/src/brapi/_types.py new file mode 100644 index 0000000..a3e695f --- /dev/null +++ b/src/brapi/_types.py @@ -0,0 +1,260 @@ +from __future__ import annotations + +from os import PathLike +from typing import ( + IO, + TYPE_CHECKING, + Any, + Dict, + List, + Type, + Tuple, + Union, + Mapping, + TypeVar, + Callable, + Iterator, + Optional, + Sequence, +) +from typing_extensions import ( + Set, + Literal, + Protocol, + TypeAlias, + TypedDict, + SupportsIndex, + overload, + override, + runtime_checkable, +) + +import httpx +import pydantic +from httpx import URL, Proxy, Timeout, Response, BaseTransport, AsyncBaseTransport + +if TYPE_CHECKING: + from ._models import BaseModel + from ._response import APIResponse, AsyncAPIResponse + +Transport = BaseTransport +AsyncTransport = AsyncBaseTransport +Query = Mapping[str, object] +Body = object +AnyMapping = Mapping[str, object] +ModelT = TypeVar("ModelT", bound=pydantic.BaseModel) +_T = TypeVar("_T") + + +# Approximates httpx internal ProxiesTypes and RequestFiles types +# while adding support for `PathLike` instances +ProxiesDict = Dict["str | URL", Union[None, str, URL, Proxy]] +ProxiesTypes = Union[str, Proxy, ProxiesDict] +if TYPE_CHECKING: + Base64FileInput = Union[IO[bytes], PathLike[str]] + FileContent = Union[IO[bytes], bytes, PathLike[str]] +else: + Base64FileInput = Union[IO[bytes], PathLike] + FileContent = Union[IO[bytes], bytes, PathLike] # PathLike is not subscriptable in Python 3.8. 
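+
+# For illustration, values like these are all accepted where `FileTypes` is expected
+# (a sketch; the file names are hypothetical):
+#
+#   b"raw file content"
+#   ("data.csv", open("data.csv", "rb"))
+#   ("data.csv", b"a,b\n1,2\n", "text/csv")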
+FileTypes = Union[ + # file (or bytes) + FileContent, + # (filename, file (or bytes)) + Tuple[Optional[str], FileContent], + # (filename, file (or bytes), content_type) + Tuple[Optional[str], FileContent, Optional[str]], + # (filename, file (or bytes), content_type, headers) + Tuple[Optional[str], FileContent, Optional[str], Mapping[str, str]], +] +RequestFiles = Union[Mapping[str, FileTypes], Sequence[Tuple[str, FileTypes]]] + +# duplicate of the above but without our custom file support +HttpxFileContent = Union[IO[bytes], bytes] +HttpxFileTypes = Union[ + # file (or bytes) + HttpxFileContent, + # (filename, file (or bytes)) + Tuple[Optional[str], HttpxFileContent], + # (filename, file (or bytes), content_type) + Tuple[Optional[str], HttpxFileContent, Optional[str]], + # (filename, file (or bytes), content_type, headers) + Tuple[Optional[str], HttpxFileContent, Optional[str], Mapping[str, str]], +] +HttpxRequestFiles = Union[Mapping[str, HttpxFileTypes], Sequence[Tuple[str, HttpxFileTypes]]] + +# Workaround to support (cast_to: Type[ResponseT]) -> ResponseT +# where ResponseT includes `None`. In order to support directly +# passing `None`, overloads would have to be defined for every +# method that uses `ResponseT` which would lead to an unacceptable +# amount of code duplication and make it unreadable. See _base_client.py +# for example usage. +# +# This unfortunately means that you will either have +# to import this type and pass it explicitly: +# +# from brapi import NoneType +# client.get('/foo', cast_to=NoneType) +# +# or build it yourself: +# +# client.get('/foo', cast_to=type(None)) +if TYPE_CHECKING: + NoneType: Type[None] +else: + NoneType = type(None) + + +class RequestOptions(TypedDict, total=False): + headers: Headers + max_retries: int + timeout: float | Timeout | None + params: Query + extra_json: AnyMapping + idempotency_key: str + follow_redirects: bool + + +# Sentinel class used until PEP 0661 is accepted +class NotGiven: + """ + For parameters with a meaningful None value, we need to distinguish between + the user explicitly passing None, and the user not passing the parameter at + all. + + User code shouldn't need to use not_given directly. + + For example: + + ```py + def create(timeout: Timeout | None | NotGiven = not_given): ... + + + create(timeout=1) # 1s timeout + create(timeout=None) # No timeout + create() # Default timeout behavior + ``` + """ + + def __bool__(self) -> Literal[False]: + return False + + @override + def __repr__(self) -> str: + return "NOT_GIVEN" + + +not_given = NotGiven() +# for backwards compatibility: +NOT_GIVEN = NotGiven() + + +class Omit: + """ + To explicitly omit something from being sent in a request, use `omit`. + + ```py + # as the default `Content-Type` header is `application/json` that will be sent + client.post("/upload/files", files={"file": b"my raw file content"}) + + # you can't explicitly override the header as it has to be dynamically generated + # to look something like: 'multipart/form-data; boundary=0d8382fcf5f8c3be01ca2e11002d2983' + client.post(..., headers={"Content-Type": "multipart/form-data"}) + + # instead you can remove the default `application/json` header by passing omit + client.post(..., headers={"Content-Type": omit}) + ``` + """ + + def __bool__(self) -> Literal[False]: + return False + + +omit = Omit() + + +@runtime_checkable +class ModelBuilderProtocol(Protocol): + @classmethod + def build( + cls: type[_T], + *, + response: Response, + data: object, + ) -> _T: ... 
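The two sentinels above cover different cases: `not_given` marks a parameter the caller never passed, while `omit` strips something that would otherwise be sent by default. A small sketch of the usual check, importing from the private `_types` module defined here (the `describe_timeout` helper is illustrative only):

```py
from __future__ import annotations

from brapi._types import NotGiven, not_given, omit  # private module defined above


def describe_timeout(timeout: float | None | NotGiven = not_given) -> str:
    # not_given means "use the client default"; an explicit None means "no timeout".
    if isinstance(timeout, NotGiven):
        return "default timeout"
    if timeout is None:
        return "timeout disabled"
    return f"{timeout}s timeout"


print(describe_timeout())              # default timeout
print(describe_timeout(timeout=None))  # timeout disabled
print(describe_timeout(timeout=2.5))   # 2.5s timeout

# Both sentinels are falsy; omit is meant for removing defaults,
# e.g. headers={"Content-Type": omit} as in the docstring above.
assert not not_given and not omit
```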
+ + +Headers = Mapping[str, Union[str, Omit]] + + +class HeadersLikeProtocol(Protocol): + def get(self, __key: str) -> str | None: ... + + +HeadersLike = Union[Headers, HeadersLikeProtocol] + +ResponseT = TypeVar( + "ResponseT", + bound=Union[ + object, + str, + None, + "BaseModel", + List[Any], + Dict[str, Any], + Response, + ModelBuilderProtocol, + "APIResponse[Any]", + "AsyncAPIResponse[Any]", + ], +) + +StrBytesIntFloat = Union[str, bytes, int, float] + +# Note: copied from Pydantic +# https://github.com/pydantic/pydantic/blob/6f31f8f68ef011f84357330186f603ff295312fd/pydantic/main.py#L79 +IncEx: TypeAlias = Union[Set[int], Set[str], Mapping[int, Union["IncEx", bool]], Mapping[str, Union["IncEx", bool]]] + +PostParser = Callable[[Any], Any] + + +@runtime_checkable +class InheritsGeneric(Protocol): + """Represents a type that has inherited from `Generic` + + The `__orig_bases__` property can be used to determine the resolved + type variable for a given base class. + """ + + __orig_bases__: tuple[_GenericAlias] + + +class _GenericAlias(Protocol): + __origin__: type[object] + + +class HttpxSendArgs(TypedDict, total=False): + auth: httpx.Auth + follow_redirects: bool + + +_T_co = TypeVar("_T_co", covariant=True) + + +if TYPE_CHECKING: + # This works because str.__contains__ does not accept object (either in typeshed or at runtime) + # https://github.com/hauntsaninja/useful_types/blob/5e9710f3875107d068e7679fd7fec9cfab0eff3b/useful_types/__init__.py#L285 + class SequenceNotStr(Protocol[_T_co]): + @overload + def __getitem__(self, index: SupportsIndex, /) -> _T_co: ... + @overload + def __getitem__(self, index: slice, /) -> Sequence[_T_co]: ... + def __contains__(self, value: object, /) -> bool: ... + def __len__(self) -> int: ... + def __iter__(self) -> Iterator[_T_co]: ... + def index(self, value: Any, start: int = 0, stop: int = ..., /) -> int: ... + def count(self, value: Any, /) -> int: ... + def __reversed__(self) -> Iterator[_T_co]: ... 
+else: + # just point this to a normal `Sequence` at runtime to avoid having to special case + # deserializing our custom sequence type + SequenceNotStr = Sequence diff --git a/src/brapi/_utils/__init__.py b/src/brapi/_utils/__init__.py new file mode 100644 index 0000000..dc64e29 --- /dev/null +++ b/src/brapi/_utils/__init__.py @@ -0,0 +1,64 @@ +from ._sync import asyncify as asyncify +from ._proxy import LazyProxy as LazyProxy +from ._utils import ( + flatten as flatten, + is_dict as is_dict, + is_list as is_list, + is_given as is_given, + is_tuple as is_tuple, + json_safe as json_safe, + lru_cache as lru_cache, + is_mapping as is_mapping, + is_tuple_t as is_tuple_t, + is_iterable as is_iterable, + is_sequence as is_sequence, + coerce_float as coerce_float, + is_mapping_t as is_mapping_t, + removeprefix as removeprefix, + removesuffix as removesuffix, + extract_files as extract_files, + is_sequence_t as is_sequence_t, + required_args as required_args, + coerce_boolean as coerce_boolean, + coerce_integer as coerce_integer, + file_from_path as file_from_path, + strip_not_given as strip_not_given, + deepcopy_minimal as deepcopy_minimal, + get_async_library as get_async_library, + maybe_coerce_float as maybe_coerce_float, + get_required_header as get_required_header, + maybe_coerce_boolean as maybe_coerce_boolean, + maybe_coerce_integer as maybe_coerce_integer, +) +from ._compat import ( + get_args as get_args, + is_union as is_union, + get_origin as get_origin, + is_typeddict as is_typeddict, + is_literal_type as is_literal_type, +) +from ._typing import ( + is_list_type as is_list_type, + is_union_type as is_union_type, + extract_type_arg as extract_type_arg, + is_iterable_type as is_iterable_type, + is_required_type as is_required_type, + is_sequence_type as is_sequence_type, + is_annotated_type as is_annotated_type, + is_type_alias_type as is_type_alias_type, + strip_annotated_type as strip_annotated_type, + extract_type_var_from_base as extract_type_var_from_base, +) +from ._streams import consume_sync_iterator as consume_sync_iterator, consume_async_iterator as consume_async_iterator +from ._transform import ( + PropertyInfo as PropertyInfo, + transform as transform, + async_transform as async_transform, + maybe_transform as maybe_transform, + async_maybe_transform as async_maybe_transform, +) +from ._reflection import ( + function_has_argument as function_has_argument, + assert_signatures_in_sync as assert_signatures_in_sync, +) +from ._datetime_parse import parse_date as parse_date, parse_datetime as parse_datetime diff --git a/src/brapi/_utils/_compat.py b/src/brapi/_utils/_compat.py new file mode 100644 index 0000000..dd70323 --- /dev/null +++ b/src/brapi/_utils/_compat.py @@ -0,0 +1,45 @@ +from __future__ import annotations + +import sys +import typing_extensions +from typing import Any, Type, Union, Literal, Optional +from datetime import date, datetime +from typing_extensions import get_args as _get_args, get_origin as _get_origin + +from .._types import StrBytesIntFloat +from ._datetime_parse import parse_date as _parse_date, parse_datetime as _parse_datetime + +_LITERAL_TYPES = {Literal, typing_extensions.Literal} + + +def get_args(tp: type[Any]) -> tuple[Any, ...]: + return _get_args(tp) + + +def get_origin(tp: type[Any]) -> type[Any] | None: + return _get_origin(tp) + + +def is_union(tp: Optional[Type[Any]]) -> bool: + if sys.version_info < (3, 10): + return tp is Union # type: ignore[comparison-overlap] + else: + import types + + return tp is Union or tp is types.UnionType + 
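A quick illustration of what `is_union` accepts on both sides of the version check. This is a sketch: it assumes Python 3.10+ for the `int | str` spelling and goes through the `brapi._utils` re-exports listed above.

```py
from typing import Optional, Union, get_origin

from brapi._utils import is_union  # re-exported from ._compat above

print(is_union(get_origin(Union[int, str])))  # True
print(is_union(get_origin(Optional[str])))    # True; Optional[X] is Union[X, None]
print(is_union(get_origin(int | str)))        # True on 3.10+, where the origin is types.UnionType
print(is_union(get_origin(list)))             # False; get_origin(list) is None
```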
+
+def is_typeddict(tp: Type[Any]) -> bool:
+    return typing_extensions.is_typeddict(tp)
+
+
+def is_literal_type(tp: Type[Any]) -> bool:
+    return get_origin(tp) in _LITERAL_TYPES
+
+
+def parse_date(value: Union[date, StrBytesIntFloat]) -> date:
+    return _parse_date(value)
+
+
+def parse_datetime(value: Union[datetime, StrBytesIntFloat]) -> datetime:
+    return _parse_datetime(value)
diff --git a/src/brapi/_utils/_datetime_parse.py b/src/brapi/_utils/_datetime_parse.py
new file mode 100644
index 0000000..7cb9d9e
--- /dev/null
+++ b/src/brapi/_utils/_datetime_parse.py
@@ -0,0 +1,136 @@
+"""
+This file contains code from https://github.com/pydantic/pydantic/blob/main/pydantic/v1/datetime_parse.py
+without the Pydantic v1 specific errors.
+"""
+
+from __future__ import annotations
+
+import re
+from typing import Dict, Union, Optional
+from datetime import date, datetime, timezone, timedelta
+
+from .._types import StrBytesIntFloat
+
+date_expr = r"(?P<year>\d{4})-(?P<month>\d{1,2})-(?P<day>\d{1,2})"
+time_expr = (
+    r"(?P<hour>\d{1,2}):(?P<minute>\d{1,2})"
+    r"(?::(?P<second>\d{1,2})(?:\.(?P<microsecond>\d{1,6})\d{0,6})?)?"
+    r"(?P<tzinfo>Z|[+-]\d{2}(?::?\d{2})?)?$"
+)
+
+
+date_re = re.compile(f"{date_expr}$")
+datetime_re = re.compile(f"{date_expr}[T ]{time_expr}")
+
+
+EPOCH = datetime(1970, 1, 1)
+# if greater than this, the number is in ms, if less than or equal it's in seconds
+# (in seconds this is 11th October 2603, in ms it's 20th August 1970)
+MS_WATERSHED = int(2e10)
+# slightly more than datetime.max in ns - (datetime.max - EPOCH).total_seconds() * 1e9
+MAX_NUMBER = int(3e20)
+
+
+def _get_numeric(value: StrBytesIntFloat, native_expected_type: str) -> Union[None, int, float]:
+    if isinstance(value, (int, float)):
+        return value
+    try:
+        return float(value)
+    except ValueError:
+        return None
+    except TypeError:
+        raise TypeError(f"invalid type; expected {native_expected_type}, string, bytes, int or float") from None
+
+
+def _from_unix_seconds(seconds: Union[int, float]) -> datetime:
+    if seconds > MAX_NUMBER:
+        return datetime.max
+    elif seconds < -MAX_NUMBER:
+        return datetime.min
+
+    while abs(seconds) > MS_WATERSHED:
+        seconds /= 1000
+    dt = EPOCH + timedelta(seconds=seconds)
+    return dt.replace(tzinfo=timezone.utc)
+
+
+def _parse_timezone(value: Optional[str]) -> Union[None, int, timezone]:
+    if value == "Z":
+        return timezone.utc
+    elif value is not None:
+        offset_mins = int(value[-2:]) if len(value) > 3 else 0
+        offset = 60 * int(value[1:3]) + offset_mins
+        if value[0] == "-":
+            offset = -offset
+        return timezone(timedelta(minutes=offset))
+    else:
+        return None
+
+
+def parse_datetime(value: Union[datetime, StrBytesIntFloat]) -> datetime:
+    """
+    Parse a datetime/int/float/string and return a datetime.datetime.
+
+    This function supports time zone offsets. When the input contains one,
+    the output uses a timezone with a fixed offset from UTC.
+
+    Raise ValueError if the input is well formatted but not a valid datetime.
+    Raise ValueError if the input isn't well formatted.
+ """ + if isinstance(value, datetime): + return value + + number = _get_numeric(value, "datetime") + if number is not None: + return _from_unix_seconds(number) + + if isinstance(value, bytes): + value = value.decode() + + assert not isinstance(value, (float, int)) + + match = datetime_re.match(value) + if match is None: + raise ValueError("invalid datetime format") + + kw = match.groupdict() + if kw["microsecond"]: + kw["microsecond"] = kw["microsecond"].ljust(6, "0") + + tzinfo = _parse_timezone(kw.pop("tzinfo")) + kw_: Dict[str, Union[None, int, timezone]] = {k: int(v) for k, v in kw.items() if v is not None} + kw_["tzinfo"] = tzinfo + + return datetime(**kw_) # type: ignore + + +def parse_date(value: Union[date, StrBytesIntFloat]) -> date: + """ + Parse a date/int/float/string and return a datetime.date. + + Raise ValueError if the input is well formatted but not a valid date. + Raise ValueError if the input isn't well formatted. + """ + if isinstance(value, date): + if isinstance(value, datetime): + return value.date() + else: + return value + + number = _get_numeric(value, "date") + if number is not None: + return _from_unix_seconds(number).date() + + if isinstance(value, bytes): + value = value.decode() + + assert not isinstance(value, (float, int)) + match = date_re.match(value) + if match is None: + raise ValueError("invalid date format") + + kw = {k: int(v) for k, v in match.groupdict().items()} + + try: + return date(**kw) + except ValueError: + raise ValueError("invalid date format") from None diff --git a/src/brapi/_utils/_logs.py b/src/brapi/_utils/_logs.py new file mode 100644 index 0000000..ad3f965 --- /dev/null +++ b/src/brapi/_utils/_logs.py @@ -0,0 +1,25 @@ +import os +import logging + +logger: logging.Logger = logging.getLogger("brapi") +httpx_logger: logging.Logger = logging.getLogger("httpx") + + +def _basic_config() -> None: + # e.g. [2023-10-05 14:12:26 - brapi._base_client:818 - DEBUG] HTTP Request: POST http://127.0.0.1:4010/foo/bar "200 OK" + logging.basicConfig( + format="[%(asctime)s - %(name)s:%(lineno)d - %(levelname)s] %(message)s", + datefmt="%Y-%m-%d %H:%M:%S", + ) + + +def setup_logging() -> None: + env = os.environ.get("BRAPI_LOG") + if env == "debug": + _basic_config() + logger.setLevel(logging.DEBUG) + httpx_logger.setLevel(logging.DEBUG) + elif env == "info": + _basic_config() + logger.setLevel(logging.INFO) + httpx_logger.setLevel(logging.INFO) diff --git a/src/brapi/_utils/_proxy.py b/src/brapi/_utils/_proxy.py new file mode 100644 index 0000000..0f239a3 --- /dev/null +++ b/src/brapi/_utils/_proxy.py @@ -0,0 +1,65 @@ +from __future__ import annotations + +from abc import ABC, abstractmethod +from typing import Generic, TypeVar, Iterable, cast +from typing_extensions import override + +T = TypeVar("T") + + +class LazyProxy(Generic[T], ABC): + """Implements data methods to pretend that an instance is another instance. + + This includes forwarding attribute access and other methods. + """ + + # Note: we have to special case proxies that themselves return proxies + # to support using a proxy as a catch-all for any random access, e.g. 
`proxy.foo.bar.baz` + + def __getattr__(self, attr: str) -> object: + proxied = self.__get_proxied__() + if isinstance(proxied, LazyProxy): + return proxied # pyright: ignore + return getattr(proxied, attr) + + @override + def __repr__(self) -> str: + proxied = self.__get_proxied__() + if isinstance(proxied, LazyProxy): + return proxied.__class__.__name__ + return repr(self.__get_proxied__()) + + @override + def __str__(self) -> str: + proxied = self.__get_proxied__() + if isinstance(proxied, LazyProxy): + return proxied.__class__.__name__ + return str(proxied) + + @override + def __dir__(self) -> Iterable[str]: + proxied = self.__get_proxied__() + if isinstance(proxied, LazyProxy): + return [] + return proxied.__dir__() + + @property # type: ignore + @override + def __class__(self) -> type: # pyright: ignore + try: + proxied = self.__get_proxied__() + except Exception: + return type(self) + if issubclass(type(proxied), LazyProxy): + return type(proxied) + return proxied.__class__ + + def __get_proxied__(self) -> T: + return self.__load__() + + def __as_proxied__(self) -> T: + """Helper method that returns the current proxy, typed as the loaded object""" + return cast(T, self) + + @abstractmethod + def __load__(self) -> T: ... diff --git a/src/brapi/_utils/_reflection.py b/src/brapi/_utils/_reflection.py new file mode 100644 index 0000000..89aa712 --- /dev/null +++ b/src/brapi/_utils/_reflection.py @@ -0,0 +1,42 @@ +from __future__ import annotations + +import inspect +from typing import Any, Callable + + +def function_has_argument(func: Callable[..., Any], arg_name: str) -> bool: + """Returns whether or not the given function has a specific parameter""" + sig = inspect.signature(func) + return arg_name in sig.parameters + + +def assert_signatures_in_sync( + source_func: Callable[..., Any], + check_func: Callable[..., Any], + *, + exclude_params: set[str] = set(), +) -> None: + """Ensure that the signature of the second function matches the first.""" + + check_sig = inspect.signature(check_func) + source_sig = inspect.signature(source_func) + + errors: list[str] = [] + + for name, source_param in source_sig.parameters.items(): + if name in exclude_params: + continue + + custom_param = check_sig.parameters.get(name) + if not custom_param: + errors.append(f"the `{name}` param is missing") + continue + + if custom_param.annotation != source_param.annotation: + errors.append( + f"types for the `{name}` param are do not match; source={repr(source_param.annotation)} checking={repr(custom_param.annotation)}" + ) + continue + + if errors: + raise AssertionError(f"{len(errors)} errors encountered when comparing signatures:\n\n" + "\n\n".join(errors)) diff --git a/src/brapi/_utils/_resources_proxy.py b/src/brapi/_utils/_resources_proxy.py new file mode 100644 index 0000000..afea909 --- /dev/null +++ b/src/brapi/_utils/_resources_proxy.py @@ -0,0 +1,24 @@ +from __future__ import annotations + +from typing import Any +from typing_extensions import override + +from ._proxy import LazyProxy + + +class ResourcesProxy(LazyProxy[Any]): + """A proxy for the `brapi.resources` module. 
+ + This is used so that we can lazily import `brapi.resources` only when + needed *and* so that users can just import `brapi` and reference `brapi.resources` + """ + + @override + def __load__(self) -> Any: + import importlib + + mod = importlib.import_module("brapi.resources") + return mod + + +resources = ResourcesProxy().__as_proxied__() diff --git a/src/brapi/_utils/_streams.py b/src/brapi/_utils/_streams.py new file mode 100644 index 0000000..f4a0208 --- /dev/null +++ b/src/brapi/_utils/_streams.py @@ -0,0 +1,12 @@ +from typing import Any +from typing_extensions import Iterator, AsyncIterator + + +def consume_sync_iterator(iterator: Iterator[Any]) -> None: + for _ in iterator: + ... + + +async def consume_async_iterator(iterator: AsyncIterator[Any]) -> None: + async for _ in iterator: + ... diff --git a/src/brapi/_utils/_sync.py b/src/brapi/_utils/_sync.py new file mode 100644 index 0000000..ad7ec71 --- /dev/null +++ b/src/brapi/_utils/_sync.py @@ -0,0 +1,86 @@ +from __future__ import annotations + +import sys +import asyncio +import functools +import contextvars +from typing import Any, TypeVar, Callable, Awaitable +from typing_extensions import ParamSpec + +import anyio +import sniffio +import anyio.to_thread + +T_Retval = TypeVar("T_Retval") +T_ParamSpec = ParamSpec("T_ParamSpec") + + +if sys.version_info >= (3, 9): + _asyncio_to_thread = asyncio.to_thread +else: + # backport of https://docs.python.org/3/library/asyncio-task.html#asyncio.to_thread + # for Python 3.8 support + async def _asyncio_to_thread( + func: Callable[T_ParamSpec, T_Retval], /, *args: T_ParamSpec.args, **kwargs: T_ParamSpec.kwargs + ) -> Any: + """Asynchronously run function *func* in a separate thread. + + Any *args and **kwargs supplied for this function are directly passed + to *func*. Also, the current :class:`contextvars.Context` is propagated, + allowing context variables from the main thread to be accessed in the + separate thread. + + Returns a coroutine that can be awaited to get the eventual result of *func*. + """ + loop = asyncio.events.get_running_loop() + ctx = contextvars.copy_context() + func_call = functools.partial(ctx.run, func, *args, **kwargs) + return await loop.run_in_executor(None, func_call) + + +async def to_thread( + func: Callable[T_ParamSpec, T_Retval], /, *args: T_ParamSpec.args, **kwargs: T_ParamSpec.kwargs +) -> T_Retval: + if sniffio.current_async_library() == "asyncio": + return await _asyncio_to_thread(func, *args, **kwargs) + + return await anyio.to_thread.run_sync( + functools.partial(func, *args, **kwargs), + ) + + +# inspired by `asyncer`, https://github.com/tiangolo/asyncer +def asyncify(function: Callable[T_ParamSpec, T_Retval]) -> Callable[T_ParamSpec, Awaitable[T_Retval]]: + """ + Take a blocking function and create an async one that receives the same + positional and keyword arguments. For python version 3.9 and above, it uses + asyncio.to_thread to run the function in a separate thread. For python version + 3.8, it uses locally defined copy of the asyncio.to_thread function which was + introduced in python 3.9. + + Usage: + + ```python + def blocking_func(arg1, arg2, kwarg1=None): + # blocking code + return result + + + result = asyncify(blocking_function)(arg1, arg2, kwarg1=value1) + ``` + + ## Arguments + + `function`: a blocking regular callable (e.g. 
a function) + + ## Return + + An async function that takes the same positional and keyword arguments as the + original one, that when called runs the same original function in a thread worker + and returns the result. + """ + + async def wrapper(*args: T_ParamSpec.args, **kwargs: T_ParamSpec.kwargs) -> T_Retval: + return await to_thread(function, *args, **kwargs) + + return wrapper diff --git a/src/brapi/_utils/_transform.py b/src/brapi/_utils/_transform.py new file mode 100644 index 0000000..5207549 --- /dev/null +++ b/src/brapi/_utils/_transform.py @@ -0,0 +1,457 @@ +from __future__ import annotations + +import io +import base64 +import pathlib +from typing import Any, Mapping, TypeVar, cast +from datetime import date, datetime +from typing_extensions import Literal, get_args, override, get_type_hints as _get_type_hints + +import anyio +import pydantic + +from ._utils import ( + is_list, + is_given, + lru_cache, + is_mapping, + is_iterable, + is_sequence, +) +from .._files import is_base64_file_input +from ._compat import get_origin, is_typeddict +from ._typing import ( + is_list_type, + is_union_type, + extract_type_arg, + is_iterable_type, + is_required_type, + is_sequence_type, + is_annotated_type, + strip_annotated_type, +) + +_T = TypeVar("_T") + + +# TODO: support for drilling globals() and locals() +# TODO: ensure works correctly with forward references in all cases + + +PropertyFormat = Literal["iso8601", "base64", "custom"] + + +class PropertyInfo: + """Metadata class to be used in Annotated types to provide information about a given type. + + For example: + + class MyParams(TypedDict): + account_holder_name: Annotated[str, PropertyInfo(alias='accountHolderName')] + + This means that {'account_holder_name': 'Robert'} will be transformed to {'accountHolderName': 'Robert'} before being sent to the API. + """ + + alias: str | None + format: PropertyFormat | None + format_template: str | None + discriminator: str | None + + def __init__( + self, + *, + alias: str | None = None, + format: PropertyFormat | None = None, + format_template: str | None = None, + discriminator: str | None = None, + ) -> None: + self.alias = alias + self.format = format + self.format_template = format_template + self.discriminator = discriminator + + @override + def __repr__(self) -> str: + return f"{self.__class__.__name__}(alias='{self.alias}', format={self.format}, format_template='{self.format_template}', discriminator='{self.discriminator}')" + + +def maybe_transform( + data: object, + expected_type: object, +) -> Any | None: + """Wrapper over `transform()` that allows `None` to be passed. + + See `transform()` for more details. + """ + if data is None: + return None + return transform(data, expected_type) + + +# Wrapper over _transform_recursive providing fake types +def transform( + data: _T, + expected_type: object, +) -> _T: + """Transform dictionaries based off of type information from the given type, for example: + + ```py + class Params(TypedDict, total=False): + card_id: Required[Annotated[str, PropertyInfo(alias="cardID")]] + + + transformed = transform({"card_id": ""}, Params) + # {'cardID': ''} + ``` + + Any keys / data that does not have type information given will be included as is. + + It should be noted that the transformations that this function does are not represented in the type system. 
+ """ + transformed = _transform_recursive(data, annotation=cast(type, expected_type)) + return cast(_T, transformed) + + +@lru_cache(maxsize=8096) +def _get_annotated_type(type_: type) -> type | None: + """If the given type is an `Annotated` type then it is returned, if not `None` is returned. + + This also unwraps the type when applicable, e.g. `Required[Annotated[T, ...]]` + """ + if is_required_type(type_): + # Unwrap `Required[Annotated[T, ...]]` to `Annotated[T, ...]` + type_ = get_args(type_)[0] + + if is_annotated_type(type_): + return type_ + + return None + + +def _maybe_transform_key(key: str, type_: type) -> str: + """Transform the given `data` based on the annotations provided in `type_`. + + Note: this function only looks at `Annotated` types that contain `PropertyInfo` metadata. + """ + annotated_type = _get_annotated_type(type_) + if annotated_type is None: + # no `Annotated` definition for this type, no transformation needed + return key + + # ignore the first argument as it is the actual type + annotations = get_args(annotated_type)[1:] + for annotation in annotations: + if isinstance(annotation, PropertyInfo) and annotation.alias is not None: + return annotation.alias + + return key + + +def _no_transform_needed(annotation: type) -> bool: + return annotation == float or annotation == int + + +def _transform_recursive( + data: object, + *, + annotation: type, + inner_type: type | None = None, +) -> object: + """Transform the given data against the expected type. + + Args: + annotation: The direct type annotation given to the particular piece of data. + This may or may not be wrapped in metadata types, e.g. `Required[T]`, `Annotated[T, ...]` etc + + inner_type: If applicable, this is the "inside" type. This is useful in certain cases where the outside type + is a container type such as `List[T]`. In that case `inner_type` should be set to `T` so that each entry in + the list can be transformed using the metadata from the container type. + + Defaults to the same value as the `annotation` argument. + """ + from .._compat import model_dump + + if inner_type is None: + inner_type = annotation + + stripped_type = strip_annotated_type(inner_type) + origin = get_origin(stripped_type) or stripped_type + if is_typeddict(stripped_type) and is_mapping(data): + return _transform_typeddict(data, stripped_type) + + if origin == dict and is_mapping(data): + items_type = get_args(stripped_type)[1] + return {key: _transform_recursive(value, annotation=items_type) for key, value in data.items()} + + if ( + # List[T] + (is_list_type(stripped_type) and is_list(data)) + # Iterable[T] + or (is_iterable_type(stripped_type) and is_iterable(data) and not isinstance(data, str)) + # Sequence[T] + or (is_sequence_type(stripped_type) and is_sequence(data) and not isinstance(data, str)) + ): + # dicts are technically iterable, but it is an iterable on the keys of the dict and is not usually + # intended as an iterable, so we don't transform it. + if isinstance(data, dict): + return cast(object, data) + + inner_type = extract_type_arg(stripped_type, 0) + if _no_transform_needed(inner_type): + # for some types there is no need to transform anything, so we can get a small + # perf boost from skipping that work. 
+ # + # but we still need to convert to a list to ensure the data is json-serializable + if is_list(data): + return data + return list(data) + + return [_transform_recursive(d, annotation=annotation, inner_type=inner_type) for d in data] + + if is_union_type(stripped_type): + # For union types we run the transformation against all subtypes to ensure that everything is transformed. + # + # TODO: there may be edge cases where the same normalized field name will transform to two different names + # in different subtypes. + for subtype in get_args(stripped_type): + data = _transform_recursive(data, annotation=annotation, inner_type=subtype) + return data + + if isinstance(data, pydantic.BaseModel): + return model_dump(data, exclude_unset=True, mode="json") + + annotated_type = _get_annotated_type(annotation) + if annotated_type is None: + return data + + # ignore the first argument as it is the actual type + annotations = get_args(annotated_type)[1:] + for annotation in annotations: + if isinstance(annotation, PropertyInfo) and annotation.format is not None: + return _format_data(data, annotation.format, annotation.format_template) + + return data + + +def _format_data(data: object, format_: PropertyFormat, format_template: str | None) -> object: + if isinstance(data, (date, datetime)): + if format_ == "iso8601": + return data.isoformat() + + if format_ == "custom" and format_template is not None: + return data.strftime(format_template) + + if format_ == "base64" and is_base64_file_input(data): + binary: str | bytes | None = None + + if isinstance(data, pathlib.Path): + binary = data.read_bytes() + elif isinstance(data, io.IOBase): + binary = data.read() + + if isinstance(binary, str): # type: ignore[unreachable] + binary = binary.encode() + + if not isinstance(binary, bytes): + raise RuntimeError(f"Could not read bytes from {data}; Received {type(binary)}") + + return base64.b64encode(binary).decode("ascii") + + return data + + +def _transform_typeddict( + data: Mapping[str, object], + expected_type: type, +) -> Mapping[str, object]: + result: dict[str, object] = {} + annotations = get_type_hints(expected_type, include_extras=True) + for key, value in data.items(): + if not is_given(value): + # we don't need to include omitted values here as they'll + # be stripped out before the request is sent anyway + continue + + type_ = annotations.get(key) + if type_ is None: + # we do not have a type annotation for this field, leave it as is + result[key] = value + else: + result[_maybe_transform_key(key, type_)] = _transform_recursive(value, annotation=type_) + return result + + +async def async_maybe_transform( + data: object, + expected_type: object, +) -> Any | None: + """Wrapper over `async_transform()` that allows `None` to be passed. + + See `async_transform()` for more details. + """ + if data is None: + return None + return await async_transform(data, expected_type) + + +async def async_transform( + data: _T, + expected_type: object, +) -> _T: + """Transform dictionaries based off of type information from the given type, for example: + + ```py + class Params(TypedDict, total=False): + card_id: Required[Annotated[str, PropertyInfo(alias="cardID")]] + + + transformed = transform({"card_id": ""}, Params) + # {'cardID': ''} + ``` + + Any keys / data that does not have type information given will be included as is. + + It should be noted that the transformations that this function does are not represented in the type system. 
+ """ + transformed = await _async_transform_recursive(data, annotation=cast(type, expected_type)) + return cast(_T, transformed) + + +async def _async_transform_recursive( + data: object, + *, + annotation: type, + inner_type: type | None = None, +) -> object: + """Transform the given data against the expected type. + + Args: + annotation: The direct type annotation given to the particular piece of data. + This may or may not be wrapped in metadata types, e.g. `Required[T]`, `Annotated[T, ...]` etc + + inner_type: If applicable, this is the "inside" type. This is useful in certain cases where the outside type + is a container type such as `List[T]`. In that case `inner_type` should be set to `T` so that each entry in + the list can be transformed using the metadata from the container type. + + Defaults to the same value as the `annotation` argument. + """ + from .._compat import model_dump + + if inner_type is None: + inner_type = annotation + + stripped_type = strip_annotated_type(inner_type) + origin = get_origin(stripped_type) or stripped_type + if is_typeddict(stripped_type) and is_mapping(data): + return await _async_transform_typeddict(data, stripped_type) + + if origin == dict and is_mapping(data): + items_type = get_args(stripped_type)[1] + return {key: _transform_recursive(value, annotation=items_type) for key, value in data.items()} + + if ( + # List[T] + (is_list_type(stripped_type) and is_list(data)) + # Iterable[T] + or (is_iterable_type(stripped_type) and is_iterable(data) and not isinstance(data, str)) + # Sequence[T] + or (is_sequence_type(stripped_type) and is_sequence(data) and not isinstance(data, str)) + ): + # dicts are technically iterable, but it is an iterable on the keys of the dict and is not usually + # intended as an iterable, so we don't transform it. + if isinstance(data, dict): + return cast(object, data) + + inner_type = extract_type_arg(stripped_type, 0) + if _no_transform_needed(inner_type): + # for some types there is no need to transform anything, so we can get a small + # perf boost from skipping that work. + # + # but we still need to convert to a list to ensure the data is json-serializable + if is_list(data): + return data + return list(data) + + return [await _async_transform_recursive(d, annotation=annotation, inner_type=inner_type) for d in data] + + if is_union_type(stripped_type): + # For union types we run the transformation against all subtypes to ensure that everything is transformed. + # + # TODO: there may be edge cases where the same normalized field name will transform to two different names + # in different subtypes. 
+ for subtype in get_args(stripped_type): + data = await _async_transform_recursive(data, annotation=annotation, inner_type=subtype) + return data + + if isinstance(data, pydantic.BaseModel): + return model_dump(data, exclude_unset=True, mode="json") + + annotated_type = _get_annotated_type(annotation) + if annotated_type is None: + return data + + # ignore the first argument as it is the actual type + annotations = get_args(annotated_type)[1:] + for annotation in annotations: + if isinstance(annotation, PropertyInfo) and annotation.format is not None: + return await _async_format_data(data, annotation.format, annotation.format_template) + + return data + + +async def _async_format_data(data: object, format_: PropertyFormat, format_template: str | None) -> object: + if isinstance(data, (date, datetime)): + if format_ == "iso8601": + return data.isoformat() + + if format_ == "custom" and format_template is not None: + return data.strftime(format_template) + + if format_ == "base64" and is_base64_file_input(data): + binary: str | bytes | None = None + + if isinstance(data, pathlib.Path): + binary = await anyio.Path(data).read_bytes() + elif isinstance(data, io.IOBase): + binary = data.read() + + if isinstance(binary, str): # type: ignore[unreachable] + binary = binary.encode() + + if not isinstance(binary, bytes): + raise RuntimeError(f"Could not read bytes from {data}; Received {type(binary)}") + + return base64.b64encode(binary).decode("ascii") + + return data + + +async def _async_transform_typeddict( + data: Mapping[str, object], + expected_type: type, +) -> Mapping[str, object]: + result: dict[str, object] = {} + annotations = get_type_hints(expected_type, include_extras=True) + for key, value in data.items(): + if not is_given(value): + # we don't need to include omitted values here as they'll + # be stripped out before the request is sent anyway + continue + + type_ = annotations.get(key) + if type_ is None: + # we do not have a type annotation for this field, leave it as is + result[key] = value + else: + result[_maybe_transform_key(key, type_)] = await _async_transform_recursive(value, annotation=type_) + return result + + +@lru_cache(maxsize=8096) +def get_type_hints( + obj: Any, + globalns: dict[str, Any] | None = None, + localns: Mapping[str, Any] | None = None, + include_extras: bool = False, +) -> dict[str, Any]: + return _get_type_hints(obj, globalns=globalns, localns=localns, include_extras=include_extras) diff --git a/src/brapi/_utils/_typing.py b/src/brapi/_utils/_typing.py new file mode 100644 index 0000000..193109f --- /dev/null +++ b/src/brapi/_utils/_typing.py @@ -0,0 +1,156 @@ +from __future__ import annotations + +import sys +import typing +import typing_extensions +from typing import Any, TypeVar, Iterable, cast +from collections import abc as _c_abc +from typing_extensions import ( + TypeIs, + Required, + Annotated, + get_args, + get_origin, +) + +from ._utils import lru_cache +from .._types import InheritsGeneric +from ._compat import is_union as _is_union + + +def is_annotated_type(typ: type) -> bool: + return get_origin(typ) == Annotated + + +def is_list_type(typ: type) -> bool: + return (get_origin(typ) or typ) == list + + +def is_sequence_type(typ: type) -> bool: + origin = get_origin(typ) or typ + return origin == typing_extensions.Sequence or origin == typing.Sequence or origin == _c_abc.Sequence + + +def is_iterable_type(typ: type) -> bool: + """If the given type is `typing.Iterable[T]`""" + origin = get_origin(typ) or typ + return origin == Iterable or 
origin == _c_abc.Iterable + + +def is_union_type(typ: type) -> bool: + return _is_union(get_origin(typ)) + + +def is_required_type(typ: type) -> bool: + return get_origin(typ) == Required + + +def is_typevar(typ: type) -> bool: + # type ignore is required because type checkers + # think this expression will always return False + return type(typ) == TypeVar # type: ignore + + +_TYPE_ALIAS_TYPES: tuple[type[typing_extensions.TypeAliasType], ...] = (typing_extensions.TypeAliasType,) +if sys.version_info >= (3, 12): + _TYPE_ALIAS_TYPES = (*_TYPE_ALIAS_TYPES, typing.TypeAliasType) + + +def is_type_alias_type(tp: Any, /) -> TypeIs[typing_extensions.TypeAliasType]: + """Return whether the provided argument is an instance of `TypeAliasType`. + + ```python + type Int = int + is_type_alias_type(Int) + # > True + Str = TypeAliasType("Str", str) + is_type_alias_type(Str) + # > True + ``` + """ + return isinstance(tp, _TYPE_ALIAS_TYPES) + + +# Extracts T from Annotated[T, ...] or from Required[Annotated[T, ...]] +@lru_cache(maxsize=8096) +def strip_annotated_type(typ: type) -> type: + if is_required_type(typ) or is_annotated_type(typ): + return strip_annotated_type(cast(type, get_args(typ)[0])) + + return typ + + +def extract_type_arg(typ: type, index: int) -> type: + args = get_args(typ) + try: + return cast(type, args[index]) + except IndexError as err: + raise RuntimeError(f"Expected type {typ} to have a type argument at index {index} but it did not") from err + + +def extract_type_var_from_base( + typ: type, + *, + generic_bases: tuple[type, ...], + index: int, + failure_message: str | None = None, +) -> type: + """Given a type like `Foo[T]`, returns the generic type variable `T`. + + This also handles the case where a concrete subclass is given, e.g. + ```py + class MyResponse(Foo[bytes]): + ... + + extract_type_var(MyResponse, bases=(Foo,), index=0) -> bytes + ``` + + And where a generic subclass is given: + ```py + _T = TypeVar('_T') + class MyResponse(Foo[_T]): + ... + + extract_type_var(MyResponse[bytes], bases=(Foo,), index=0) -> bytes + ``` + """ + cls = cast(object, get_origin(typ) or typ) + if cls in generic_bases: # pyright: ignore[reportUnnecessaryContains] + # we're given the class directly + return extract_type_arg(typ, index) + + # if a subclass is given + # --- + # this is needed as __orig_bases__ is not present in the typeshed stubs + # because it is intended to be for internal use only, however there does + # not seem to be a way to resolve generic TypeVars for inherited subclasses + # without using it. + if isinstance(cls, InheritsGeneric): + target_base_class: Any | None = None + for base in cls.__orig_bases__: + if base.__origin__ in generic_bases: + target_base_class = base + break + + if target_base_class is None: + raise RuntimeError( + "Could not find the generic base class;\n" + "This should never happen;\n" + f"Does {cls} inherit from one of {generic_bases} ?" + ) + + extracted = extract_type_arg(target_base_class, index) + if is_typevar(extracted): + # If the extracted type argument is itself a type variable + # then that means the subclass itself is generic, so we have + # to resolve the type argument from the class itself, not + # the base class. + # + # Note: if there is more than 1 type argument, the subclass could + # change the ordering of the type arguments, this is not currently + # supported. 
+ return extract_type_arg(typ, index) + + return extracted + + raise RuntimeError(failure_message or f"Could not resolve inner type variable at index {index} for {typ}") diff --git a/src/brapi/_utils/_utils.py b/src/brapi/_utils/_utils.py new file mode 100644 index 0000000..50d5926 --- /dev/null +++ b/src/brapi/_utils/_utils.py @@ -0,0 +1,421 @@ +from __future__ import annotations + +import os +import re +import inspect +import functools +from typing import ( + Any, + Tuple, + Mapping, + TypeVar, + Callable, + Iterable, + Sequence, + cast, + overload, +) +from pathlib import Path +from datetime import date, datetime +from typing_extensions import TypeGuard + +import sniffio + +from .._types import Omit, NotGiven, FileTypes, HeadersLike + +_T = TypeVar("_T") +_TupleT = TypeVar("_TupleT", bound=Tuple[object, ...]) +_MappingT = TypeVar("_MappingT", bound=Mapping[str, object]) +_SequenceT = TypeVar("_SequenceT", bound=Sequence[object]) +CallableT = TypeVar("CallableT", bound=Callable[..., Any]) + + +def flatten(t: Iterable[Iterable[_T]]) -> list[_T]: + return [item for sublist in t for item in sublist] + + +def extract_files( + # TODO: this needs to take Dict but variance issues..... + # create protocol type ? + query: Mapping[str, object], + *, + paths: Sequence[Sequence[str]], +) -> list[tuple[str, FileTypes]]: + """Recursively extract files from the given dictionary based on specified paths. + + A path may look like this ['foo', 'files', '', 'data']. + + Note: this mutates the given dictionary. + """ + files: list[tuple[str, FileTypes]] = [] + for path in paths: + files.extend(_extract_items(query, path, index=0, flattened_key=None)) + return files + + +def _extract_items( + obj: object, + path: Sequence[str], + *, + index: int, + flattened_key: str | None, +) -> list[tuple[str, FileTypes]]: + try: + key = path[index] + except IndexError: + if not is_given(obj): + # no value was provided - we can safely ignore + return [] + + # cyclical import + from .._files import assert_is_file_content + + # We have exhausted the path, return the entry we found. + assert flattened_key is not None + + if is_list(obj): + files: list[tuple[str, FileTypes]] = [] + for entry in obj: + assert_is_file_content(entry, key=flattened_key + "[]" if flattened_key else "") + files.append((flattened_key + "[]", cast(FileTypes, entry))) + return files + + assert_is_file_content(obj, key=flattened_key) + return [(flattened_key, cast(FileTypes, obj))] + + index += 1 + if is_dict(obj): + try: + # We are at the last entry in the path so we must remove the field + if (len(path)) == index: + item = obj.pop(key) + else: + item = obj[key] + except KeyError: + # Key was not present in the dictionary, this is not indicative of an error + # as the given path may not point to a required field. We also do not want + # to enforce required fields as the API may differ from the spec in some cases. + return [] + if flattened_key is None: + flattened_key = key + else: + flattened_key += f"[{key}]" + return _extract_items( + item, + path, + index=index, + flattened_key=flattened_key, + ) + elif is_list(obj): + if key != "": + return [] + + return flatten( + [ + _extract_items( + item, + path, + index=index, + flattened_key=flattened_key + "[]" if flattened_key is not None else "[]", + ) + for item in obj + ] + ) + + # Something unexpected was passed, just ignore it. 
+ return [] + + +def is_given(obj: _T | NotGiven | Omit) -> TypeGuard[_T]: + return not isinstance(obj, NotGiven) and not isinstance(obj, Omit) + + +# Type safe methods for narrowing types with TypeVars. +# The default narrowing for isinstance(obj, dict) is dict[unknown, unknown], +# however this cause Pyright to rightfully report errors. As we know we don't +# care about the contained types we can safely use `object` in it's place. +# +# There are two separate functions defined, `is_*` and `is_*_t` for different use cases. +# `is_*` is for when you're dealing with an unknown input +# `is_*_t` is for when you're narrowing a known union type to a specific subset + + +def is_tuple(obj: object) -> TypeGuard[tuple[object, ...]]: + return isinstance(obj, tuple) + + +def is_tuple_t(obj: _TupleT | object) -> TypeGuard[_TupleT]: + return isinstance(obj, tuple) + + +def is_sequence(obj: object) -> TypeGuard[Sequence[object]]: + return isinstance(obj, Sequence) + + +def is_sequence_t(obj: _SequenceT | object) -> TypeGuard[_SequenceT]: + return isinstance(obj, Sequence) + + +def is_mapping(obj: object) -> TypeGuard[Mapping[str, object]]: + return isinstance(obj, Mapping) + + +def is_mapping_t(obj: _MappingT | object) -> TypeGuard[_MappingT]: + return isinstance(obj, Mapping) + + +def is_dict(obj: object) -> TypeGuard[dict[object, object]]: + return isinstance(obj, dict) + + +def is_list(obj: object) -> TypeGuard[list[object]]: + return isinstance(obj, list) + + +def is_iterable(obj: object) -> TypeGuard[Iterable[object]]: + return isinstance(obj, Iterable) + + +def deepcopy_minimal(item: _T) -> _T: + """Minimal reimplementation of copy.deepcopy() that will only copy certain object types: + + - mappings, e.g. `dict` + - list + + This is done for performance reasons. + """ + if is_mapping(item): + return cast(_T, {k: deepcopy_minimal(v) for k, v in item.items()}) + if is_list(item): + return cast(_T, [deepcopy_minimal(entry) for entry in item]) + return item + + +# copied from https://github.com/Rapptz/RoboDanny +def human_join(seq: Sequence[str], *, delim: str = ", ", final: str = "or") -> str: + size = len(seq) + if size == 0: + return "" + + if size == 1: + return seq[0] + + if size == 2: + return f"{seq[0]} {final} {seq[1]}" + + return delim.join(seq[:-1]) + f" {final} {seq[-1]}" + + +def quote(string: str) -> str: + """Add single quotation marks around the given string. Does *not* do any escaping.""" + return f"'{string}'" + + +def required_args(*variants: Sequence[str]) -> Callable[[CallableT], CallableT]: + """Decorator to enforce a given set of arguments or variants of arguments are passed to the decorated function. + + Useful for enforcing runtime validation of overloaded functions. + + Example usage: + ```py + @overload + def foo(*, a: str) -> str: ... + + + @overload + def foo(*, b: bool) -> str: ... + + + # This enforces the same constraints that a static type checker would + # i.e. that either a or b must be passed to the function + @required_args(["a"], ["b"]) + def foo(*, a: str | None = None, b: bool | None = None) -> str: ... 
+ ``` + """ + + def inner(func: CallableT) -> CallableT: + params = inspect.signature(func).parameters + positional = [ + name + for name, param in params.items() + if param.kind + in { + param.POSITIONAL_ONLY, + param.POSITIONAL_OR_KEYWORD, + } + ] + + @functools.wraps(func) + def wrapper(*args: object, **kwargs: object) -> object: + given_params: set[str] = set() + for i, _ in enumerate(args): + try: + given_params.add(positional[i]) + except IndexError: + raise TypeError( + f"{func.__name__}() takes {len(positional)} argument(s) but {len(args)} were given" + ) from None + + for key in kwargs.keys(): + given_params.add(key) + + for variant in variants: + matches = all((param in given_params for param in variant)) + if matches: + break + else: # no break + if len(variants) > 1: + variations = human_join( + ["(" + human_join([quote(arg) for arg in variant], final="and") + ")" for variant in variants] + ) + msg = f"Missing required arguments; Expected either {variations} arguments to be given" + else: + assert len(variants) > 0 + + # TODO: this error message is not deterministic + missing = list(set(variants[0]) - given_params) + if len(missing) > 1: + msg = f"Missing required arguments: {human_join([quote(arg) for arg in missing])}" + else: + msg = f"Missing required argument: {quote(missing[0])}" + raise TypeError(msg) + return func(*args, **kwargs) + + return wrapper # type: ignore + + return inner + + +_K = TypeVar("_K") +_V = TypeVar("_V") + + +@overload +def strip_not_given(obj: None) -> None: ... + + +@overload +def strip_not_given(obj: Mapping[_K, _V | NotGiven]) -> dict[_K, _V]: ... + + +@overload +def strip_not_given(obj: object) -> object: ... + + +def strip_not_given(obj: object | None) -> object: + """Remove all top-level keys where their values are instances of `NotGiven`""" + if obj is None: + return None + + if not is_mapping(obj): + return obj + + return {key: value for key, value in obj.items() if not isinstance(value, NotGiven)} + + +def coerce_integer(val: str) -> int: + return int(val, base=10) + + +def coerce_float(val: str) -> float: + return float(val) + + +def coerce_boolean(val: str) -> bool: + return val == "true" or val == "1" or val == "on" + + +def maybe_coerce_integer(val: str | None) -> int | None: + if val is None: + return None + return coerce_integer(val) + + +def maybe_coerce_float(val: str | None) -> float | None: + if val is None: + return None + return coerce_float(val) + + +def maybe_coerce_boolean(val: str | None) -> bool | None: + if val is None: + return None + return coerce_boolean(val) + + +def removeprefix(string: str, prefix: str) -> str: + """Remove a prefix from a string. + + Backport of `str.removeprefix` for Python < 3.9 + """ + if string.startswith(prefix): + return string[len(prefix) :] + return string + + +def removesuffix(string: str, suffix: str) -> str: + """Remove a suffix from a string. 
+ + Backport of `str.removesuffix` for Python < 3.9 + """ + if string.endswith(suffix): + return string[: -len(suffix)] + return string + + +def file_from_path(path: str) -> FileTypes: + contents = Path(path).read_bytes() + file_name = os.path.basename(path) + return (file_name, contents) + + +def get_required_header(headers: HeadersLike, header: str) -> str: + lower_header = header.lower() + if is_mapping_t(headers): + # mypy doesn't understand the type narrowing here + for k, v in headers.items(): # type: ignore + if k.lower() == lower_header and isinstance(v, str): + return v + + # to deal with the case where the header looks like Stainless-Event-Id + intercaps_header = re.sub(r"([^\w])(\w)", lambda pat: pat.group(1) + pat.group(2).upper(), header.capitalize()) + + for normalized_header in [header, lower_header, header.upper(), intercaps_header]: + value = headers.get(normalized_header) + if value: + return value + + raise ValueError(f"Could not find {header} header") + + +def get_async_library() -> str: + try: + return sniffio.current_async_library() + except Exception: + return "false" + + +def lru_cache(*, maxsize: int | None = 128) -> Callable[[CallableT], CallableT]: + """A version of functools.lru_cache that retains the type signature + for the wrapped function arguments. + """ + wrapper = functools.lru_cache( # noqa: TID251 + maxsize=maxsize, + ) + return cast(Any, wrapper) # type: ignore[no-any-return] + + +def json_safe(data: object) -> object: + """Translates a mapping / sequence recursively in the same fashion + as `pydantic` v2's `model_dump(mode="json")`. + """ + if is_mapping(data): + return {json_safe(key): json_safe(value) for key, value in data.items()} + + if is_iterable(data) and not isinstance(data, (str, bytes, bytearray)): + return [json_safe(item) for item in data] + + if isinstance(data, (datetime, date)): + return data.isoformat() + + return data diff --git a/src/brapi/_version.py b/src/brapi/_version.py new file mode 100644 index 0000000..d163e19 --- /dev/null +++ b/src/brapi/_version.py @@ -0,0 +1,4 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +__title__ = "brapi" +__version__ = "1.0.0" # x-release-please-version diff --git a/src/lib/.keep b/src/brapi/lib/.keep similarity index 81% rename from src/lib/.keep rename to src/brapi/lib/.keep index 7554f8b..5e2c99f 100644 --- a/src/lib/.keep +++ b/src/brapi/lib/.keep @@ -1,4 +1,4 @@ File generated from our OpenAPI spec by Stainless. This directory can be used to store custom files to expand the SDK. -It is ignored by Stainless code generation and its content (other than this keep file) won't be touched. +It is ignored by Stainless code generation and its content (other than this keep file) won't be touched. \ No newline at end of file diff --git a/src/brapi/py.typed b/src/brapi/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/src/brapi/resources/__init__.py b/src/brapi/resources/__init__.py new file mode 100644 index 0000000..a4d2d01 --- /dev/null +++ b/src/brapi/resources/__init__.py @@ -0,0 +1,47 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
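Before moving on to the generated resources, a short sketch of the `json_safe` helper defined just above, reached through the `brapi._utils` re-export; the payload is purely illustrative.

```py
from datetime import date, datetime, timezone

from brapi._utils import json_safe  # re-exported in _utils/__init__.py

payload = {
    "range": "1d",
    "requested_at": datetime(2024, 5, 17, 12, 30, tzinfo=timezone.utc),
    "sessions": (date(2024, 5, 16), date(2024, 5, 17)),  # non-str iterables become lists
}

print(json_safe(payload))
# {'range': '1d',
#  'requested_at': '2024-05-17T12:30:00+00:00',
#  'sessions': ['2024-05-16', '2024-05-17']}
```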
+ +from .v2 import ( + V2Resource, + AsyncV2Resource, + V2ResourceWithRawResponse, + AsyncV2ResourceWithRawResponse, + V2ResourceWithStreamingResponse, + AsyncV2ResourceWithStreamingResponse, +) +from .quote import ( + QuoteResource, + AsyncQuoteResource, + QuoteResourceWithRawResponse, + AsyncQuoteResourceWithRawResponse, + QuoteResourceWithStreamingResponse, + AsyncQuoteResourceWithStreamingResponse, +) +from .available import ( + AvailableResource, + AsyncAvailableResource, + AvailableResourceWithRawResponse, + AsyncAvailableResourceWithRawResponse, + AvailableResourceWithStreamingResponse, + AsyncAvailableResourceWithStreamingResponse, +) + +__all__ = [ + "QuoteResource", + "AsyncQuoteResource", + "QuoteResourceWithRawResponse", + "AsyncQuoteResourceWithRawResponse", + "QuoteResourceWithStreamingResponse", + "AsyncQuoteResourceWithStreamingResponse", + "AvailableResource", + "AsyncAvailableResource", + "AvailableResourceWithRawResponse", + "AsyncAvailableResourceWithRawResponse", + "AvailableResourceWithStreamingResponse", + "AsyncAvailableResourceWithStreamingResponse", + "V2Resource", + "AsyncV2Resource", + "V2ResourceWithRawResponse", + "AsyncV2ResourceWithRawResponse", + "V2ResourceWithStreamingResponse", + "AsyncV2ResourceWithStreamingResponse", +] diff --git a/src/brapi/resources/available.py b/src/brapi/resources/available.py new file mode 100644 index 0000000..26e04de --- /dev/null +++ b/src/brapi/resources/available.py @@ -0,0 +1,283 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +import httpx + +from ..types import available_list_params +from .._types import Body, Omit, Query, Headers, NotGiven, omit, not_given +from .._utils import maybe_transform, async_maybe_transform +from .._compat import cached_property +from .._resource import SyncAPIResource, AsyncAPIResource +from .._response import ( + to_raw_response_wrapper, + to_streamed_response_wrapper, + async_to_raw_response_wrapper, + async_to_streamed_response_wrapper, +) +from .._base_client import make_request_options +from ..types.available_list_response import AvailableListResponse + +__all__ = ["AvailableResource", "AsyncAvailableResource"] + + +class AvailableResource(SyncAPIResource): + @cached_property + def with_raw_response(self) -> AvailableResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return + the raw response object instead of the parsed content. + + For more information, see https://www.github.com/brapi-dev/brapi-python#accessing-raw-response-data-eg-headers + """ + return AvailableResourceWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> AvailableResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. + + For more information, see https://www.github.com/brapi-dev/brapi-python#with_streaming_response + """ + return AvailableResourceWithStreamingResponse(self) + + def list( + self, + *, + token: str | Omit = omit, + search: str | Omit = omit, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. 
+ extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> AvailableListResponse: + """ + Obtenha uma lista completa de todos os tickers (identificadores) de ativos + financeiros (ações, FIIs, BDRs, ETFs, índices) que a API Brapi tem dados + disponíveis para consulta no endpoint `/api/quote/{tickers}`. + + ### Funcionalidade: + + - Retorna arrays separados para `indexes` (índices) e `stocks` (outros ativos). + - Pode ser filtrado usando o parâmetro `search` para encontrar tickers + específicos. + + ### Autenticação: + + Requer token de autenticação via `token` (query) ou `Authorization` (header). + + ### Exemplo de Requisição: + + **Listar todos os tickers disponíveis:** + + ```bash + curl -X GET "https://brapi.dev/api/available?token=SEU_TOKEN" + ``` + + **Buscar tickers que contenham 'BBDC':** + + ```bash + curl -X GET "https://brapi.dev/api/available?search=BBDC&token=SEU_TOKEN" + ``` + + ### Resposta: + + A resposta é um objeto JSON com duas chaves: + + - `indexes`: Array de strings contendo os tickers dos índices disponíveis (ex: + `["^BVSP", "^IFIX"]`). + - `stocks`: Array de strings contendo os tickers das ações, FIIs, BDRs e ETFs + disponíveis (ex: `["PETR4", "VALE3", "ITSA4", "MXRF11"]`). + + Args: + token: **Obrigatório caso não esteja adicionado como header "Authorization".** Seu + token de autenticação pessoal da API Brapi. + + **Formas de Envio:** + + 1. **Query Parameter:** Adicione `?token=SEU_TOKEN` ao final da URL. + 2. **HTTP Header:** Inclua o header `Authorization: Bearer SEU_TOKEN` na sua + requisição. + + Ambos os métodos são aceitos, mas pelo menos um deles deve ser utilizado. + Obtenha seu token em [brapi.dev/dashboard](https://brapi.dev/dashboard). + + search: **Opcional.** Termo para filtrar a lista de tickers (correspondência parcial, + case-insensitive). Se omitido, retorna todos os tickers. + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + return self._get( + "/api/available", + options=make_request_options( + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + query=maybe_transform( + { + "token": token, + "search": search, + }, + available_list_params.AvailableListParams, + ), + ), + cast_to=AvailableListResponse, + ) + + +class AsyncAvailableResource(AsyncAPIResource): + @cached_property + def with_raw_response(self) -> AsyncAvailableResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return + the raw response object instead of the parsed content. + + For more information, see https://www.github.com/brapi-dev/brapi-python#accessing-raw-response-data-eg-headers + """ + return AsyncAvailableResourceWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> AsyncAvailableResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. 
+ + For more information, see https://www.github.com/brapi-dev/brapi-python#with_streaming_response + """ + return AsyncAvailableResourceWithStreamingResponse(self) + + async def list( + self, + *, + token: str | Omit = omit, + search: str | Omit = omit, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> AvailableListResponse: + """ + Obtenha uma lista completa de todos os tickers (identificadores) de ativos + financeiros (ações, FIIs, BDRs, ETFs, índices) que a API Brapi tem dados + disponíveis para consulta no endpoint `/api/quote/{tickers}`. + + ### Funcionalidade: + + - Retorna arrays separados para `indexes` (índices) e `stocks` (outros ativos). + - Pode ser filtrado usando o parâmetro `search` para encontrar tickers + específicos. + + ### Autenticação: + + Requer token de autenticação via `token` (query) ou `Authorization` (header). + + ### Exemplo de Requisição: + + **Listar todos os tickers disponíveis:** + + ```bash + curl -X GET "https://brapi.dev/api/available?token=SEU_TOKEN" + ``` + + **Buscar tickers que contenham 'BBDC':** + + ```bash + curl -X GET "https://brapi.dev/api/available?search=BBDC&token=SEU_TOKEN" + ``` + + ### Resposta: + + A resposta é um objeto JSON com duas chaves: + + - `indexes`: Array de strings contendo os tickers dos índices disponíveis (ex: + `["^BVSP", "^IFIX"]`). + - `stocks`: Array de strings contendo os tickers das ações, FIIs, BDRs e ETFs + disponíveis (ex: `["PETR4", "VALE3", "ITSA4", "MXRF11"]`). + + Args: + token: **Obrigatório caso não esteja adicionado como header "Authorization".** Seu + token de autenticação pessoal da API Brapi. + + **Formas de Envio:** + + 1. **Query Parameter:** Adicione `?token=SEU_TOKEN` ao final da URL. + 2. **HTTP Header:** Inclua o header `Authorization: Bearer SEU_TOKEN` na sua + requisição. + + Ambos os métodos são aceitos, mas pelo menos um deles deve ser utilizado. + Obtenha seu token em [brapi.dev/dashboard](https://brapi.dev/dashboard). + + search: **Opcional.** Termo para filtrar a lista de tickers (correspondência parcial, + case-insensitive). Se omitido, retorna todos os tickers. 
+ + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + return await self._get( + "/api/available", + options=make_request_options( + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + query=await async_maybe_transform( + { + "token": token, + "search": search, + }, + available_list_params.AvailableListParams, + ), + ), + cast_to=AvailableListResponse, + ) + + +class AvailableResourceWithRawResponse: + def __init__(self, available: AvailableResource) -> None: + self._available = available + + self.list = to_raw_response_wrapper( + available.list, + ) + + +class AsyncAvailableResourceWithRawResponse: + def __init__(self, available: AsyncAvailableResource) -> None: + self._available = available + + self.list = async_to_raw_response_wrapper( + available.list, + ) + + +class AvailableResourceWithStreamingResponse: + def __init__(self, available: AvailableResource) -> None: + self._available = available + + self.list = to_streamed_response_wrapper( + available.list, + ) + + +class AsyncAvailableResourceWithStreamingResponse: + def __init__(self, available: AsyncAvailableResource) -> None: + self._available = available + + self.list = async_to_streamed_response_wrapper( + available.list, + ) diff --git a/src/brapi/resources/quote.py b/src/brapi/resources/quote.py new file mode 100644 index 0000000..ba428d5 --- /dev/null +++ b/src/brapi/resources/quote.py @@ -0,0 +1,959 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing import List +from typing_extensions import Literal + +import httpx + +from ..types import quote_list_params, quote_retrieve_params +from .._types import Body, Omit, Query, Headers, NotGiven, omit, not_given +from .._utils import maybe_transform, async_maybe_transform +from .._compat import cached_property +from .._resource import SyncAPIResource, AsyncAPIResource +from .._response import ( + to_raw_response_wrapper, + to_streamed_response_wrapper, + async_to_raw_response_wrapper, + async_to_streamed_response_wrapper, +) +from .._base_client import make_request_options +from ..types.quote_list_response import QuoteListResponse +from ..types.quote_retrieve_response import QuoteRetrieveResponse + +__all__ = ["QuoteResource", "AsyncQuoteResource"] + + +class QuoteResource(SyncAPIResource): + @cached_property + def with_raw_response(self) -> QuoteResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return + the raw response object instead of the parsed content. + + For more information, see https://www.github.com/brapi-dev/brapi-python#accessing-raw-response-data-eg-headers + """ + return QuoteResourceWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> QuoteResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. 
+ + For more information, see https://www.github.com/brapi-dev/brapi-python#with_streaming_response + """ + return QuoteResourceWithStreamingResponse(self) + + def retrieve( + self, + tickers: str, + *, + token: str | Omit = omit, + dividends: bool | Omit = omit, + fundamental: bool | Omit = omit, + interval: Literal["1m", "2m", "5m", "15m", "30m", "60m", "90m", "1h", "1d", "5d", "1wk", "1mo", "3mo"] + | Omit = omit, + modules: List[ + Literal[ + "summaryProfile", + "balanceSheetHistory", + "defaultKeyStatistics", + "balanceSheetHistoryQuarterly", + "incomeStatementHistory", + "incomeStatementHistoryQuarterly", + "financialData", + "financialDataHistory", + "financialDataHistoryQuarterly", + "defaultKeyStatisticsHistory", + "defaultKeyStatisticsHistoryQuarterly", + "valueAddedHistory", + "valueAddedHistoryQuarterly", + "cashflowHistory", + "cashflowHistoryQuarterly", + ] + ] + | Omit = omit, + range: Literal["1d", "5d", "1mo", "3mo", "6mo", "1y", "2y", "5y", "10y", "ytd", "max"] | Omit = omit, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> QuoteRetrieveResponse: + """ + Este endpoint é a principal forma de obter informações detalhadas sobre um ou + mais ativos financeiros (ações, FIIs, ETFs, BDRs, índices) listados na B3, + identificados pelos seus respectivos **tickers**. + + ### Funcionalidades Principais: + + - **Cotação Atual:** Retorna o preço mais recente, variação diária, máximas, + mínimas, volume, etc. + - **Dados Históricos:** Permite solicitar séries históricas de preços usando os + parâmetros `range` e `interval`. + - **Dados Fundamentalistas:** Opcionalmente, inclui dados fundamentalistas + básicos (P/L, LPA) com o parâmetro `fundamental=true`. + - **Dividendos:** Opcionalmente, inclui histórico de dividendos e JCP com + `dividends=true`. + - **Módulos Adicionais:** Permite requisitar conjuntos de dados financeiros mais + aprofundados através do parâmetro `modules` (veja detalhes abaixo). + + ### 🧪 Ações de Teste (Sem Autenticação): + + Para facilitar o desenvolvimento e teste, as seguintes **4 ações têm acesso + irrestrito** e **não requerem autenticação**: + + - **PETR4** (Petrobras PN) + - **MGLU3** (Magazine Luiza ON) + - **VALE3** (Vale ON) + - **ITUB4** (Itaú Unibanco PN) + + **Importante:** Você pode consultar essas ações sem token e com acesso a todos + os recursos (históricos, módulos, dividendos). Porém, se misturar essas ações + com outras na mesma requisição, a autenticação será obrigatória. + + ### Autenticação: + + Para **outras ações** (além das 4 de teste), é **obrigatório** fornecer um token + de autenticação válido, seja via query parameter `token` ou via header + `Authorization: Bearer seu_token`. + + ### Exemplos de Requisição: + + **1. Cotação simples de PETR4 e VALE3 (ações de teste - sem token):** + + ```bash + curl -X GET "https://brapi.dev/api/quote/PETR4,VALE3" + ``` + + **2. Cotação de MGLU3 com dados históricos do último mês (ação de teste - sem + token):** + + ```bash + curl -X GET "https://brapi.dev/api/quote/MGLU3?range=1mo&interval=1d" + ``` + + **3. 
Cotação de ITUB4 incluindo dividendos e dados fundamentalistas (ação de
+ teste - sem token):**
+
+ ```bash
+ curl -X GET "https://brapi.dev/api/quote/ITUB4?fundamental=true&dividends=true"
+ ```
+
+ **4. Cotação de WEGE3 com Resumo da Empresa e Balanço Patrimonial Anual (via
+ módulos - requer token):**
+
+ ```bash
+ curl -X GET "https://brapi.dev/api/quote/WEGE3?modules=summaryProfile,balanceSheetHistory&token=SEU_TOKEN"
+ ```
+
+ **5. Exemplo de requisição mista (requer token):**
+
+ ```bash
+ curl -X GET "https://brapi.dev/api/quote/PETR4,BBAS3?token=SEU_TOKEN"
+ ```
+
+ _Nota: Como BBAS3 não é uma ação de teste, toda a requisição requer
+ autenticação, mesmo contendo PETR4._
+
+ ### Parâmetro `modules` (Detalhado):
+
+ O parâmetro `modules` é extremamente poderoso para enriquecer a resposta com
+ dados financeiros detalhados. Você pode solicitar um ou mais módulos, separados
+ por vírgula.
+
+ **Módulos Disponíveis:**
+
+ - `summaryProfile`: Informações cadastrais da empresa (endereço, setor,
+ descrição do negócio, website, número de funcionários).
+ - `balanceSheetHistory`: Histórico **anual** do Balanço Patrimonial.
+ - `balanceSheetHistoryQuarterly`: Histórico **trimestral** do Balanço
+ Patrimonial.
+ - `defaultKeyStatistics`: Principais estatísticas da empresa (Valor de Mercado,
+ P/L, ROE, Dividend Yield, etc.) - **TTM (Trailing Twelve Months)**.
+ - `defaultKeyStatisticsHistory`: Histórico **anual** das Principais
+ Estatísticas.
+ - `defaultKeyStatisticsHistoryQuarterly`: Histórico **trimestral** das
+ Principais Estatísticas.
+ - `incomeStatementHistory`: Histórico **anual** da Demonstração do Resultado do
+ Exercício (DRE).
+ - `incomeStatementHistoryQuarterly`: Histórico **trimestral** da Demonstração do
+ Resultado do Exercício (DRE).
+ - `financialData`: Dados financeiros selecionados (Receita, Lucro Bruto, EBITDA,
+ Dívida Líquida, Fluxo de Caixa Livre, Margens) - **TTM (Trailing Twelve
+ Months)**.
+ - `financialDataHistory`: Histórico **anual** dos Dados Financeiros.
+ - `financialDataHistoryQuarterly`: Histórico **trimestral** dos Dados
+ Financeiros.
+ - `valueAddedHistory`: Histórico **anual** da Demonstração do Valor Adicionado
+ (DVA).
+ - `valueAddedHistoryQuarterly`: Histórico **trimestral** da Demonstração do
+ Valor Adicionado (DVA).
+ - `cashflowHistory`: Histórico **anual** da Demonstração do Fluxo de Caixa
+ (DFC).
+ - `cashflowHistoryQuarterly`: Histórico **trimestral** da Demonstração do Fluxo
+ de Caixa (DFC).
+
+ **Exemplo de Uso do `modules`:**
+
+ Para obter a cotação de BBDC4 junto com seu DRE trimestral e Fluxo de Caixa
+ anual:
+
+ ```bash
+ curl -X GET "https://brapi.dev/api/quote/BBDC4?modules=incomeStatementHistoryQuarterly,cashflowHistory&token=SEU_TOKEN"
+ ```
+
+ ### Resposta:
+
+ A resposta é um objeto JSON contendo a chave `results`, que é um array. Cada
+ elemento do array corresponde a um ticker solicitado e contém os dados da
+ cotação e os módulos adicionais requisitados.
+
+ - **Sucesso (200 OK):** Retorna os dados conforme solicitado.
+ - **Bad Request (400 Bad Request):** Ocorre se um parâmetro for inválido (ex:
+ `range=invalid`) ou se a formatação estiver incorreta.
+ - **Unauthorized (401 Unauthorized):** Token inválido ou ausente.
+ - **Payment Required (402 Payment Required):** Limite de requisições do plano
+ atual excedido.
+ - **Not Found (404 Not Found):** Um ou mais tickers solicitados não foram
+ encontrados.
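For comparison, the same kind of request through this SDK looks roughly like the sketch below; the `Brapi` client class name and the `client.quote` attribute are assumptions based on the package layout, and the keyword arguments mirror the query parameters documented in the Args section that follows.

```python
from brapi import Brapi  # assumed client class name

client = Brapi()

# PETR4 and VALE3 are test tickers, so no token is needed for this particular call.
quotes = client.quote.retrieve(
    "PETR4,VALE3",
    range="1mo",
    interval="1d",
    fundamental=True,
)
print(quotes)  # QuoteRetrieveResponse; the payload exposes the `results` array described above
```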
+ + Args: + token: **Obrigatório caso não esteja adicionado como header "Authorization".** Seu + token de autenticação pessoal da API Brapi. + + **Formas de Envio:** + + 1. **Query Parameter:** Adicione `?token=SEU_TOKEN` ao final da URL. + 2. **HTTP Header:** Inclua o header `Authorization: Bearer SEU_TOKEN` na sua + requisição. + + Ambos os métodos são aceitos, mas pelo menos um deles deve ser utilizado. + Obtenha seu token em [brapi.dev/dashboard](https://brapi.dev/dashboard). + + dividends: **Opcional.** Booleano (`true` ou `false`). Se `true`, inclui informações sobre + dividendos e JCP (Juros sobre Capital Próprio) pagos historicamente pelo ativo + na chave `dividendsData`. + + fundamental: **Opcional.** Booleano (`true` ou `false`). Se `true`, inclui dados + fundamentalistas básicos na resposta, como Preço/Lucro (P/L) e Lucro Por Ação + (LPA). + + **Nota:** Para dados fundamentalistas mais completos, utilize o parâmetro + `modules`. + + interval: **Opcional.** Define a granularidade (intervalo) dos dados históricos de preço + (`historicalDataPrice`). Requer que `range` também seja especificado. + + **Valores Possíveis:** + + - `1m`, `2m`, `5m`, `15m`, `30m`, `60m`, `90m`, `1h`: Intervalos intraday + (minutos/horas). **Atenção:** Disponibilidade pode variar conforme o `range` e + o ativo. + - `1d`: Diário (padrão se `range` for especificado e `interval` omitido). + - `5d`: 5 dias. + - `1wk`: Semanal. + - `1mo`: Mensal. + - `3mo`: Trimestral. + + modules: **Opcional.** Uma lista de módulos de dados adicionais, separados por vírgula + (`,`), para incluir na resposta. Permite buscar dados financeiros detalhados. + + **Exemplos:** + + - `modules=summaryProfile` (retorna perfil da empresa) + - `modules=balanceSheetHistory,incomeStatementHistory` (retorna histórico anual + do BP e DRE) + + Veja a descrição principal do endpoint para a lista completa de módulos e seus + conteúdos. + + range: **Opcional.** Define o período para os dados históricos de preço + (`historicalDataPrice`). Se omitido, apenas a cotação mais recente é retornada + (a menos que `interval` seja usado). + + **Valores Possíveis:** + + - `1d`: Último dia de pregão (intraday se `interval` for minutos/horas). + - `5d`: Últimos 5 dias. + - `1mo`: Último mês. + - `3mo`: Últimos 3 meses. + - `6mo`: Últimos 6 meses. + - `1y`: Último ano. + - `2y`: Últimos 2 anos. + - `5y`: Últimos 5 anos. + - `10y`: Últimos 10 anos. + - `ytd`: Desde o início do ano atual (Year-to-Date). + - `max`: Todo o período histórico disponível. 
+ + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not tickers: + raise ValueError(f"Expected a non-empty value for `tickers` but received {tickers!r}") + return self._get( + f"/api/quote/{tickers}", + options=make_request_options( + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + query=maybe_transform( + { + "token": token, + "dividends": dividends, + "fundamental": fundamental, + "interval": interval, + "modules": modules, + "range": range, + }, + quote_retrieve_params.QuoteRetrieveParams, + ), + ), + cast_to=QuoteRetrieveResponse, + ) + + def list( + self, + *, + token: str | Omit = omit, + limit: int | Omit = omit, + page: int | Omit = omit, + search: str | Omit = omit, + sector: Literal[ + "Retail Trade", + "Energy Minerals", + "Health Services", + "Utilities", + "Finance", + "Consumer Services", + "Consumer Non-Durables", + "Non-Energy Minerals", + "Commercial Services", + "Distribution Services", + "Transportation", + "Technology Services", + "Process Industries", + "Communications", + "Producer Manufacturing", + "Miscellaneous", + "Electronic Technology", + "Industrial Services", + "Health Technology", + "Consumer Durables", + ] + | Omit = omit, + sort_by: Literal["name", "close", "change", "change_abs", "volume", "market_cap_basic", "sector"] | Omit = omit, + sort_order: Literal["asc", "desc"] | Omit = omit, + type: Literal["stock", "fund", "bdr"] | Omit = omit, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> QuoteListResponse: + """ + Obtenha uma lista paginada de cotações de diversos ativos (ações, FIIs, BDRs) + negociados na B3, com opções avançadas de busca, filtragem e ordenação. + + ### Funcionalidades: + + - **Busca por Ticker:** Filtre por parte do ticker usando `search`. + - **Filtragem por Tipo:** Restrinja a lista a `stock`, `fund` (FII) ou `bdr` com + o parâmetro `type`. + - **Filtragem por Setor:** Selecione ativos de um setor específico usando + `sector`. + - **Ordenação:** Ordene os resultados por diversos campos (preço, variação, + volume, etc.) usando `sortBy` e `sortOrder`. + - **Paginação:** Controle o número de resultados por página (`limit`) e a página + desejada (`page`). + + ### Autenticação: + + Requer token de autenticação via `token` (query) ou `Authorization` (header). 
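A rough equivalent of the curl examples below, expressed through this SDK (same `Brapi`/`client.quote` naming assumptions as before; note that the API-level `sortBy`/`sortOrder` parameters become the snake_case keyword arguments `sort_by`/`sort_order`):

```python
from brapi import Brapi  # assumed client class name

client = Brapi()

page = client.quote.list(
    sector="Finance",
    sort_by="volume",
    sort_order="desc",
    limit=10,
    page=1,
    token="SEU_TOKEN",
)
print(page)  # QuoteListResponse with the pagination fields described below
```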
+ + ### Exemplo de Requisição: + + **Listar as 10 ações do setor Financeiro com maior volume, ordenadas de forma + decrescente:** + + ```bash + curl -X GET "https://brapi.dev/api/quote/list?sector=Finance&sortBy=volume&sortOrder=desc&limit=10&page=1&token=SEU_TOKEN" + ``` + + **Buscar por ativos cujo ticker contenha 'ITUB' e ordenar por nome ascendente:** + + ```bash + curl -X GET "https://brapi.dev/api/quote/list?search=ITUB&sortBy=name&sortOrder=asc&token=SEU_TOKEN" + ``` + + ### Resposta: + + A resposta contém a lista de `stocks` (e `indexes` relevantes), informações + sobre os filtros aplicados, detalhes da paginação (`currentPage`, `totalPages`, + `itemsPerPage`, `totalCount`, `hasNextPage`) e listas de setores + (`availableSectors`) e tipos (`availableStockTypes`) disponíveis para filtragem. + + Args: + token: **Obrigatório caso não esteja adicionado como header "Authorization".** Seu + token de autenticação pessoal da API Brapi. + + **Formas de Envio:** + + 1. **Query Parameter:** Adicione `?token=SEU_TOKEN` ao final da URL. + 2. **HTTP Header:** Inclua o header `Authorization: Bearer SEU_TOKEN` na sua + requisição. + + Ambos os métodos são aceitos, mas pelo menos um deles deve ser utilizado. + Obtenha seu token em [brapi.dev/dashboard](https://brapi.dev/dashboard). + + limit: **Opcional.** Número máximo de ativos a serem retornados por página. O valor + padrão pode variar. + + page: **Opcional.** Número da página dos resultados a ser retornada, considerando o + `limit` especificado. Começa em 1. + + search: + **Opcional.** Termo para buscar ativos por ticker (correspondência parcial). Ex: + `PETR` encontrará `PETR4`, `PETR3`. + + sector: **Opcional.** Filtra os resultados por setor de atuação da empresa. Utilize um + dos valores retornados em `availableSectors`. + + sort_by: **Opcional.** Campo pelo qual os resultados serão ordenados. + + sort_order: **Opcional.** Direção da ordenação: `asc` (ascendente) ou `desc` (descendente). + Requer que `sortBy` seja especificado. + + type: **Opcional.** Filtra os resultados por tipo de ativo. + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + return self._get( + "/api/quote/list", + options=make_request_options( + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + query=maybe_transform( + { + "token": token, + "limit": limit, + "page": page, + "search": search, + "sector": sector, + "sort_by": sort_by, + "sort_order": sort_order, + "type": type, + }, + quote_list_params.QuoteListParams, + ), + ), + cast_to=QuoteListResponse, + ) + + +class AsyncQuoteResource(AsyncAPIResource): + @cached_property + def with_raw_response(self) -> AsyncQuoteResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return + the raw response object instead of the parsed content. + + For more information, see https://www.github.com/brapi-dev/brapi-python#accessing-raw-response-data-eg-headers + """ + return AsyncQuoteResourceWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> AsyncQuoteResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. 
+ + For more information, see https://www.github.com/brapi-dev/brapi-python#with_streaming_response + """ + return AsyncQuoteResourceWithStreamingResponse(self) + + async def retrieve( + self, + tickers: str, + *, + token: str | Omit = omit, + dividends: bool | Omit = omit, + fundamental: bool | Omit = omit, + interval: Literal["1m", "2m", "5m", "15m", "30m", "60m", "90m", "1h", "1d", "5d", "1wk", "1mo", "3mo"] + | Omit = omit, + modules: List[ + Literal[ + "summaryProfile", + "balanceSheetHistory", + "defaultKeyStatistics", + "balanceSheetHistoryQuarterly", + "incomeStatementHistory", + "incomeStatementHistoryQuarterly", + "financialData", + "financialDataHistory", + "financialDataHistoryQuarterly", + "defaultKeyStatisticsHistory", + "defaultKeyStatisticsHistoryQuarterly", + "valueAddedHistory", + "valueAddedHistoryQuarterly", + "cashflowHistory", + "cashflowHistoryQuarterly", + ] + ] + | Omit = omit, + range: Literal["1d", "5d", "1mo", "3mo", "6mo", "1y", "2y", "5y", "10y", "ytd", "max"] | Omit = omit, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> QuoteRetrieveResponse: + """ + Este endpoint é a principal forma de obter informações detalhadas sobre um ou + mais ativos financeiros (ações, FIIs, ETFs, BDRs, índices) listados na B3, + identificados pelos seus respectivos **tickers**. + + ### Funcionalidades Principais: + + - **Cotação Atual:** Retorna o preço mais recente, variação diária, máximas, + mínimas, volume, etc. + - **Dados Históricos:** Permite solicitar séries históricas de preços usando os + parâmetros `range` e `interval`. + - **Dados Fundamentalistas:** Opcionalmente, inclui dados fundamentalistas + básicos (P/L, LPA) com o parâmetro `fundamental=true`. + - **Dividendos:** Opcionalmente, inclui histórico de dividendos e JCP com + `dividends=true`. + - **Módulos Adicionais:** Permite requisitar conjuntos de dados financeiros mais + aprofundados através do parâmetro `modules` (veja detalhes abaixo). + + ### 🧪 Ações de Teste (Sem Autenticação): + + Para facilitar o desenvolvimento e teste, as seguintes **4 ações têm acesso + irrestrito** e **não requerem autenticação**: + + - **PETR4** (Petrobras PN) + - **MGLU3** (Magazine Luiza ON) + - **VALE3** (Vale ON) + - **ITUB4** (Itaú Unibanco PN) + + **Importante:** Você pode consultar essas ações sem token e com acesso a todos + os recursos (históricos, módulos, dividendos). Porém, se misturar essas ações + com outras na mesma requisição, a autenticação será obrigatória. + + ### Autenticação: + + Para **outras ações** (além das 4 de teste), é **obrigatório** fornecer um token + de autenticação válido, seja via query parameter `token` ou via header + `Authorization: Bearer seu_token`. + + ### Exemplos de Requisição: + + **1. Cotação simples de PETR4 e VALE3 (ações de teste - sem token):** + + ```bash + curl -X GET "https://brapi.dev/api/quote/PETR4,VALE3" + ``` + + **2. Cotação de MGLU3 com dados históricos do último mês (ação de teste - sem + token):** + + ```bash + curl -X GET "https://brapi.dev/api/quote/MGLU3?range=1mo&interval=1d" + ``` + + **3. 
Cotação de ITUB4 incluindo dividendos e dados fundamentalistas (ação de
+ teste - sem token):**
+
+ ```bash
+ curl -X GET "https://brapi.dev/api/quote/ITUB4?fundamental=true&dividends=true"
+ ```
+
+ **4. Cotação de WEGE3 com Resumo da Empresa e Balanço Patrimonial Anual (via
+ módulos - requer token):**
+
+ ```bash
+ curl -X GET "https://brapi.dev/api/quote/WEGE3?modules=summaryProfile,balanceSheetHistory&token=SEU_TOKEN"
+ ```
+
+ **5. Exemplo de requisição mista (requer token):**
+
+ ```bash
+ curl -X GET "https://brapi.dev/api/quote/PETR4,BBAS3?token=SEU_TOKEN"
+ ```
+
+ _Nota: Como BBAS3 não é uma ação de teste, toda a requisição requer
+ autenticação, mesmo contendo PETR4._
+
+ ### Parâmetro `modules` (Detalhado):
+
+ O parâmetro `modules` é extremamente poderoso para enriquecer a resposta com
+ dados financeiros detalhados. Você pode solicitar um ou mais módulos, separados
+ por vírgula.
+
+ **Módulos Disponíveis:**
+
+ - `summaryProfile`: Informações cadastrais da empresa (endereço, setor,
+ descrição do negócio, website, número de funcionários).
+ - `balanceSheetHistory`: Histórico **anual** do Balanço Patrimonial.
+ - `balanceSheetHistoryQuarterly`: Histórico **trimestral** do Balanço
+ Patrimonial.
+ - `defaultKeyStatistics`: Principais estatísticas da empresa (Valor de Mercado,
+ P/L, ROE, Dividend Yield, etc.) - **TTM (Trailing Twelve Months)**.
+ - `defaultKeyStatisticsHistory`: Histórico **anual** das Principais
+ Estatísticas.
+ - `defaultKeyStatisticsHistoryQuarterly`: Histórico **trimestral** das
+ Principais Estatísticas.
+ - `incomeStatementHistory`: Histórico **anual** da Demonstração do Resultado do
+ Exercício (DRE).
+ - `incomeStatementHistoryQuarterly`: Histórico **trimestral** da Demonstração do
+ Resultado do Exercício (DRE).
+ - `financialData`: Dados financeiros selecionados (Receita, Lucro Bruto, EBITDA,
+ Dívida Líquida, Fluxo de Caixa Livre, Margens) - **TTM (Trailing Twelve
+ Months)**.
+ - `financialDataHistory`: Histórico **anual** dos Dados Financeiros.
+ - `financialDataHistoryQuarterly`: Histórico **trimestral** dos Dados
+ Financeiros.
+ - `valueAddedHistory`: Histórico **anual** da Demonstração do Valor Adicionado
+ (DVA).
+ - `valueAddedHistoryQuarterly`: Histórico **trimestral** da Demonstração do
+ Valor Adicionado (DVA).
+ - `cashflowHistory`: Histórico **anual** da Demonstração do Fluxo de Caixa
+ (DFC).
+ - `cashflowHistoryQuarterly`: Histórico **trimestral** da Demonstração do Fluxo
+ de Caixa (DFC).
+
+ **Exemplo de Uso do `modules`:**
+
+ Para obter a cotação de BBDC4 junto com seu DRE trimestral e Fluxo de Caixa
+ anual:
+
+ ```bash
+ curl -X GET "https://brapi.dev/api/quote/BBDC4?modules=incomeStatementHistoryQuarterly,cashflowHistory&token=SEU_TOKEN"
+ ```
+
+ ### Resposta:
+
+ A resposta é um objeto JSON contendo a chave `results`, que é um array. Cada
+ elemento do array corresponde a um ticker solicitado e contém os dados da
+ cotação e os módulos adicionais requisitados.
+
+ - **Sucesso (200 OK):** Retorna os dados conforme solicitado.
+ - **Bad Request (400 Bad Request):** Ocorre se um parâmetro for inválido (ex:
+ `range=invalid`) ou se a formatação estiver incorreta.
+ - **Unauthorized (401 Unauthorized):** Token inválido ou ausente.
+ - **Payment Required (402 Payment Required):** Limite de requisições do plano
+ atual excedido.
+ - **Not Found (404 Not Found):** Um ou mais tickers solicitados não foram
+ encontrados.
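An asynchronous version of the same call, assuming the async client is exported as `brapi.AsyncBrapi`; since WEGE3 is not one of the test tickers, a token is passed explicitly:

```python
import asyncio

from brapi import AsyncBrapi  # assumed async client class name


async def main() -> None:
    client = AsyncBrapi()
    quotes = await client.quote.retrieve(
        "WEGE3",
        modules=["summaryProfile", "balanceSheetHistory"],
        token="SEU_TOKEN",
    )
    print(quotes)


asyncio.run(main())
```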
+ + Args: + token: **Obrigatório caso não esteja adicionado como header "Authorization".** Seu + token de autenticação pessoal da API Brapi. + + **Formas de Envio:** + + 1. **Query Parameter:** Adicione `?token=SEU_TOKEN` ao final da URL. + 2. **HTTP Header:** Inclua o header `Authorization: Bearer SEU_TOKEN` na sua + requisição. + + Ambos os métodos são aceitos, mas pelo menos um deles deve ser utilizado. + Obtenha seu token em [brapi.dev/dashboard](https://brapi.dev/dashboard). + + dividends: **Opcional.** Booleano (`true` ou `false`). Se `true`, inclui informações sobre + dividendos e JCP (Juros sobre Capital Próprio) pagos historicamente pelo ativo + na chave `dividendsData`. + + fundamental: **Opcional.** Booleano (`true` ou `false`). Se `true`, inclui dados + fundamentalistas básicos na resposta, como Preço/Lucro (P/L) e Lucro Por Ação + (LPA). + + **Nota:** Para dados fundamentalistas mais completos, utilize o parâmetro + `modules`. + + interval: **Opcional.** Define a granularidade (intervalo) dos dados históricos de preço + (`historicalDataPrice`). Requer que `range` também seja especificado. + + **Valores Possíveis:** + + - `1m`, `2m`, `5m`, `15m`, `30m`, `60m`, `90m`, `1h`: Intervalos intraday + (minutos/horas). **Atenção:** Disponibilidade pode variar conforme o `range` e + o ativo. + - `1d`: Diário (padrão se `range` for especificado e `interval` omitido). + - `5d`: 5 dias. + - `1wk`: Semanal. + - `1mo`: Mensal. + - `3mo`: Trimestral. + + modules: **Opcional.** Uma lista de módulos de dados adicionais, separados por vírgula + (`,`), para incluir na resposta. Permite buscar dados financeiros detalhados. + + **Exemplos:** + + - `modules=summaryProfile` (retorna perfil da empresa) + - `modules=balanceSheetHistory,incomeStatementHistory` (retorna histórico anual + do BP e DRE) + + Veja a descrição principal do endpoint para a lista completa de módulos e seus + conteúdos. + + range: **Opcional.** Define o período para os dados históricos de preço + (`historicalDataPrice`). Se omitido, apenas a cotação mais recente é retornada + (a menos que `interval` seja usado). + + **Valores Possíveis:** + + - `1d`: Último dia de pregão (intraday se `interval` for minutos/horas). + - `5d`: Últimos 5 dias. + - `1mo`: Último mês. + - `3mo`: Últimos 3 meses. + - `6mo`: Últimos 6 meses. + - `1y`: Último ano. + - `2y`: Últimos 2 anos. + - `5y`: Últimos 5 anos. + - `10y`: Últimos 10 anos. + - `ytd`: Desde o início do ano atual (Year-to-Date). + - `max`: Todo o período histórico disponível. 
+ + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not tickers: + raise ValueError(f"Expected a non-empty value for `tickers` but received {tickers!r}") + return await self._get( + f"/api/quote/{tickers}", + options=make_request_options( + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + query=await async_maybe_transform( + { + "token": token, + "dividends": dividends, + "fundamental": fundamental, + "interval": interval, + "modules": modules, + "range": range, + }, + quote_retrieve_params.QuoteRetrieveParams, + ), + ), + cast_to=QuoteRetrieveResponse, + ) + + async def list( + self, + *, + token: str | Omit = omit, + limit: int | Omit = omit, + page: int | Omit = omit, + search: str | Omit = omit, + sector: Literal[ + "Retail Trade", + "Energy Minerals", + "Health Services", + "Utilities", + "Finance", + "Consumer Services", + "Consumer Non-Durables", + "Non-Energy Minerals", + "Commercial Services", + "Distribution Services", + "Transportation", + "Technology Services", + "Process Industries", + "Communications", + "Producer Manufacturing", + "Miscellaneous", + "Electronic Technology", + "Industrial Services", + "Health Technology", + "Consumer Durables", + ] + | Omit = omit, + sort_by: Literal["name", "close", "change", "change_abs", "volume", "market_cap_basic", "sector"] | Omit = omit, + sort_order: Literal["asc", "desc"] | Omit = omit, + type: Literal["stock", "fund", "bdr"] | Omit = omit, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> QuoteListResponse: + """ + Obtenha uma lista paginada de cotações de diversos ativos (ações, FIIs, BDRs) + negociados na B3, com opções avançadas de busca, filtragem e ordenação. + + ### Funcionalidades: + + - **Busca por Ticker:** Filtre por parte do ticker usando `search`. + - **Filtragem por Tipo:** Restrinja a lista a `stock`, `fund` (FII) ou `bdr` com + o parâmetro `type`. + - **Filtragem por Setor:** Selecione ativos de um setor específico usando + `sector`. + - **Ordenação:** Ordene os resultados por diversos campos (preço, variação, + volume, etc.) usando `sortBy` e `sortOrder`. + - **Paginação:** Controle o número de resultados por página (`limit`) e a página + desejada (`page`). + + ### Autenticação: + + Requer token de autenticação via `token` (query) ou `Authorization` (header). 
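The async listing call also accepts the per-request options (`extra_query`, `extra_headers`, `timeout`) documented further down in this docstring; a small sketch under the same `AsyncBrapi` naming assumption:

```python
import asyncio

import httpx

from brapi import AsyncBrapi  # assumed async client class name


async def main() -> None:
    client = AsyncBrapi()
    page = await client.quote.list(
        search="ITUB",
        token="SEU_TOKEN",
        # extra_query merges raw query parameters; timeout overrides the client default.
        extra_query={"sortOrder": "asc"},
        timeout=httpx.Timeout(10.0),
    )
    print(page)


asyncio.run(main())
```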
+ + ### Exemplo de Requisição: + + **Listar as 10 ações do setor Financeiro com maior volume, ordenadas de forma + decrescente:** + + ```bash + curl -X GET "https://brapi.dev/api/quote/list?sector=Finance&sortBy=volume&sortOrder=desc&limit=10&page=1&token=SEU_TOKEN" + ``` + + **Buscar por ativos cujo ticker contenha 'ITUB' e ordenar por nome ascendente:** + + ```bash + curl -X GET "https://brapi.dev/api/quote/list?search=ITUB&sortBy=name&sortOrder=asc&token=SEU_TOKEN" + ``` + + ### Resposta: + + A resposta contém a lista de `stocks` (e `indexes` relevantes), informações + sobre os filtros aplicados, detalhes da paginação (`currentPage`, `totalPages`, + `itemsPerPage`, `totalCount`, `hasNextPage`) e listas de setores + (`availableSectors`) e tipos (`availableStockTypes`) disponíveis para filtragem. + + Args: + token: **Obrigatório caso não esteja adicionado como header "Authorization".** Seu + token de autenticação pessoal da API Brapi. + + **Formas de Envio:** + + 1. **Query Parameter:** Adicione `?token=SEU_TOKEN` ao final da URL. + 2. **HTTP Header:** Inclua o header `Authorization: Bearer SEU_TOKEN` na sua + requisição. + + Ambos os métodos são aceitos, mas pelo menos um deles deve ser utilizado. + Obtenha seu token em [brapi.dev/dashboard](https://brapi.dev/dashboard). + + limit: **Opcional.** Número máximo de ativos a serem retornados por página. O valor + padrão pode variar. + + page: **Opcional.** Número da página dos resultados a ser retornada, considerando o + `limit` especificado. Começa em 1. + + search: + **Opcional.** Termo para buscar ativos por ticker (correspondência parcial). Ex: + `PETR` encontrará `PETR4`, `PETR3`. + + sector: **Opcional.** Filtra os resultados por setor de atuação da empresa. Utilize um + dos valores retornados em `availableSectors`. + + sort_by: **Opcional.** Campo pelo qual os resultados serão ordenados. + + sort_order: **Opcional.** Direção da ordenação: `asc` (ascendente) ou `desc` (descendente). + Requer que `sortBy` seja especificado. + + type: **Opcional.** Filtra os resultados por tipo de ativo. 
+ + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + return await self._get( + "/api/quote/list", + options=make_request_options( + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + query=await async_maybe_transform( + { + "token": token, + "limit": limit, + "page": page, + "search": search, + "sector": sector, + "sort_by": sort_by, + "sort_order": sort_order, + "type": type, + }, + quote_list_params.QuoteListParams, + ), + ), + cast_to=QuoteListResponse, + ) + + +class QuoteResourceWithRawResponse: + def __init__(self, quote: QuoteResource) -> None: + self._quote = quote + + self.retrieve = to_raw_response_wrapper( + quote.retrieve, + ) + self.list = to_raw_response_wrapper( + quote.list, + ) + + +class AsyncQuoteResourceWithRawResponse: + def __init__(self, quote: AsyncQuoteResource) -> None: + self._quote = quote + + self.retrieve = async_to_raw_response_wrapper( + quote.retrieve, + ) + self.list = async_to_raw_response_wrapper( + quote.list, + ) + + +class QuoteResourceWithStreamingResponse: + def __init__(self, quote: QuoteResource) -> None: + self._quote = quote + + self.retrieve = to_streamed_response_wrapper( + quote.retrieve, + ) + self.list = to_streamed_response_wrapper( + quote.list, + ) + + +class AsyncQuoteResourceWithStreamingResponse: + def __init__(self, quote: AsyncQuoteResource) -> None: + self._quote = quote + + self.retrieve = async_to_streamed_response_wrapper( + quote.retrieve, + ) + self.list = async_to_streamed_response_wrapper( + quote.list, + ) diff --git a/src/brapi/resources/v2/__init__.py b/src/brapi/resources/v2/__init__.py new file mode 100644 index 0000000..e6c512e --- /dev/null +++ b/src/brapi/resources/v2/__init__.py @@ -0,0 +1,75 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
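The `*WithRawResponse` and `*WithStreamingResponse` classes that close `quote.py` above are reached through the `with_raw_response` and `with_streaming_response` properties; a brief sketch following the usual Stainless response-wrapper interface (an assumption here, since those helpers live elsewhere in the package), with the same assumed `Brapi` client name:

```python
from brapi import Brapi  # assumed client class name

client = Brapi()

# Raw access: inspect headers, then parse the body on demand.
response = client.quote.with_raw_response.retrieve("PETR4")
print(response.headers)
quote = response.parse()

# Streaming access: the body is not read eagerly.
with client.quote.with_streaming_response.retrieve("PETR4") as streamed:
    for line in streamed.iter_lines():
        print(line)
```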
+ +from .v2 import ( + V2Resource, + AsyncV2Resource, + V2ResourceWithRawResponse, + AsyncV2ResourceWithRawResponse, + V2ResourceWithStreamingResponse, + AsyncV2ResourceWithStreamingResponse, +) +from .crypto import ( + CryptoResource, + AsyncCryptoResource, + CryptoResourceWithRawResponse, + AsyncCryptoResourceWithRawResponse, + CryptoResourceWithStreamingResponse, + AsyncCryptoResourceWithStreamingResponse, +) +from .currency import ( + CurrencyResource, + AsyncCurrencyResource, + CurrencyResourceWithRawResponse, + AsyncCurrencyResourceWithRawResponse, + CurrencyResourceWithStreamingResponse, + AsyncCurrencyResourceWithStreamingResponse, +) +from .inflation import ( + InflationResource, + AsyncInflationResource, + InflationResourceWithRawResponse, + AsyncInflationResourceWithRawResponse, + InflationResourceWithStreamingResponse, + AsyncInflationResourceWithStreamingResponse, +) +from .prime_rate import ( + PrimeRateResource, + AsyncPrimeRateResource, + PrimeRateResourceWithRawResponse, + AsyncPrimeRateResourceWithRawResponse, + PrimeRateResourceWithStreamingResponse, + AsyncPrimeRateResourceWithStreamingResponse, +) + +__all__ = [ + "CryptoResource", + "AsyncCryptoResource", + "CryptoResourceWithRawResponse", + "AsyncCryptoResourceWithRawResponse", + "CryptoResourceWithStreamingResponse", + "AsyncCryptoResourceWithStreamingResponse", + "CurrencyResource", + "AsyncCurrencyResource", + "CurrencyResourceWithRawResponse", + "AsyncCurrencyResourceWithRawResponse", + "CurrencyResourceWithStreamingResponse", + "AsyncCurrencyResourceWithStreamingResponse", + "InflationResource", + "AsyncInflationResource", + "InflationResourceWithRawResponse", + "AsyncInflationResourceWithRawResponse", + "InflationResourceWithStreamingResponse", + "AsyncInflationResourceWithStreamingResponse", + "PrimeRateResource", + "AsyncPrimeRateResource", + "PrimeRateResourceWithRawResponse", + "AsyncPrimeRateResourceWithRawResponse", + "PrimeRateResourceWithStreamingResponse", + "AsyncPrimeRateResourceWithStreamingResponse", + "V2Resource", + "AsyncV2Resource", + "V2ResourceWithRawResponse", + "AsyncV2ResourceWithRawResponse", + "V2ResourceWithStreamingResponse", + "AsyncV2ResourceWithStreamingResponse", +] diff --git a/src/brapi/resources/v2/crypto.py b/src/brapi/resources/v2/crypto.py new file mode 100644 index 0000000..317e8d9 --- /dev/null +++ b/src/brapi/resources/v2/crypto.py @@ -0,0 +1,524 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from __future__ import annotations + +from typing_extensions import Literal + +import httpx + +from ..._types import Body, Omit, Query, Headers, NotGiven, omit, not_given +from ..._utils import maybe_transform, async_maybe_transform +from ..._compat import cached_property +from ...types.v2 import crypto_retrieve_params, crypto_list_available_params +from ..._resource import SyncAPIResource, AsyncAPIResource +from ..._response import ( + to_raw_response_wrapper, + to_streamed_response_wrapper, + async_to_raw_response_wrapper, + async_to_streamed_response_wrapper, +) +from ..._base_client import make_request_options +from ...types.v2.crypto_retrieve_response import CryptoRetrieveResponse +from ...types.v2.crypto_list_available_response import CryptoListAvailableResponse + +__all__ = ["CryptoResource", "AsyncCryptoResource"] + + +class CryptoResource(SyncAPIResource): + @cached_property + def with_raw_response(self) -> CryptoResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return + the raw response object instead of the parsed content. + + For more information, see https://www.github.com/brapi-dev/brapi-python#accessing-raw-response-data-eg-headers + """ + return CryptoResourceWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> CryptoResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. + + For more information, see https://www.github.com/brapi-dev/brapi-python#with_streaming_response + """ + return CryptoResourceWithStreamingResponse(self) + + def retrieve( + self, + *, + coin: str, + token: str | Omit = omit, + currency: str | Omit = omit, + interval: Literal["1m", "2m", "5m", "15m", "30m", "60m", "90m", "1h", "1d", "5d", "1wk", "1mo", "3mo"] + | Omit = omit, + range: Literal["1d", "5d", "1mo", "3mo", "6mo", "1y", "2y", "5y", "10y", "ytd", "max"] | Omit = omit, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> CryptoRetrieveResponse: + """ + Obtenha cotações atualizadas e dados históricos para uma ou mais criptomoedas. + + ### Funcionalidades: + + - **Cotação Múltipla:** Consulte várias criptomoedas em uma única requisição + usando o parâmetro `coin`. + - **Moeda de Referência:** Especifique a moeda fiduciária para a cotação com + `currency` (padrão: BRL). + - **Dados Históricos:** Solicite séries históricas usando `range` e `interval` + (similar ao endpoint de ações). + + ### Autenticação: + + Requer token de autenticação via `token` (query) ou `Authorization` (header). 
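A minimal SDK sketch of the crypto quote call; the `client.v2.crypto` attribute path and the `Brapi` client name are assumptions, while the `coin`, `currency` and `token` keyword arguments come straight from the signature above:

```python
from brapi import Brapi  # assumed client class name

client = Brapi()

# `coin` accepts a comma-separated list of tickers; `currency` defaults to BRL when omitted.
coins = client.v2.crypto.retrieve(
    coin="BTC,ETH",
    currency="USD",
    token="SEU_TOKEN",
)
print(coins)  # CryptoRetrieveResponse with the `coins` array described below
```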
+
+ ### Exemplo de Requisição:
+
+ **Cotação de Bitcoin (BTC) e Ethereum (ETH) em Dólar Americano (USD):**
+
+ ```bash
+ curl -X GET "https://brapi.dev/api/v2/crypto?coin=BTC,ETH&currency=USD&token=SEU_TOKEN"
+ ```
+
+ **Cotação de Cardano (ADA) em Real (BRL) com histórico do último mês (intervalo
+ diário):**
+
+ ```bash
+ curl -X GET "https://brapi.dev/api/v2/crypto?coin=ADA&currency=BRL&range=1mo&interval=1d&token=SEU_TOKEN"
+ ```
+
+ ### Resposta:
+
+ A resposta contém um array `coins`, onde cada objeto representa uma criptomoeda
+ solicitada, incluindo sua cotação atual, dados de mercado e, opcionalmente, a
+ série histórica (`historicalDataPrice`).
+
+ Args:
+ coin: **Obrigatório.** Uma ou mais siglas (tickers) de criptomoedas que você deseja
+ consultar. Separe múltiplas siglas por vírgula (`,`).
+
+ - **Exemplos:** `BTC`, `ETH,ADA`, `SOL`.
+
+ token: **Obrigatório caso não esteja adicionado como header "Authorization".** Seu
+ token de autenticação pessoal da API Brapi.
+
+ **Formas de Envio:**
+
+ 1. **Query Parameter:** Adicione `?token=SEU_TOKEN` ao final da URL.
+ 2. **HTTP Header:** Inclua o header `Authorization: Bearer SEU_TOKEN` na sua
+ requisição.
+
+ Ambos os métodos são aceitos, mas pelo menos um deles deve ser utilizado.
+ Obtenha seu token em [brapi.dev/dashboard](https://brapi.dev/dashboard).
+
+ currency: **Opcional.** A sigla da moeda fiduciária na qual a cotação da(s) criptomoeda(s)
+ deve ser retornada. Se omitido, o padrão é `BRL` (Real Brasileiro).
+
+ interval: **Opcional.** Define a granularidade (intervalo) dos dados históricos de preço
+ (`historicalDataPrice`). Requer que `range` também seja especificado. Funciona
+ de forma análoga ao endpoint de ações.
+
+ - Valores: `1m`, `2m`, `5m`, `15m`, `30m`, `60m`, `90m`, `1h`, `1d`, `5d`,
+ `1wk`, `1mo`, `3mo`.
+
+ range: **Opcional.** Define o período para os dados históricos de preço
+ (`historicalDataPrice`). Funciona de forma análoga ao endpoint de ações. Se
+ omitido, apenas a cotação mais recente é retornada (a menos que `interval` seja
+ usado).
+
+ - Valores: `1d`, `5d`, `1mo`, `3mo`, `6mo`, `1y`, `2y`, `5y`, `10y`, `ytd`,
+ `max`.
+
+ extra_headers: Send extra headers
+
+ extra_query: Add additional query parameters to the request
+
+ extra_body: Add additional JSON properties to the request
+
+ timeout: Override the client-level default timeout for this request, in seconds
+ """
+ return self._get(
+ "/api/v2/crypto",
+ options=make_request_options(
+ extra_headers=extra_headers,
+ extra_query=extra_query,
+ extra_body=extra_body,
+ timeout=timeout,
+ query=maybe_transform(
+ {
+ "coin": coin,
+ "token": token,
+ "currency": currency,
+ "interval": interval,
+ "range": range,
+ },
+ crypto_retrieve_params.CryptoRetrieveParams,
+ ),
+ ),
+ cast_to=CryptoRetrieveResponse,
+ )
+
+ def list_available(
+ self,
+ *,
+ token: str | Omit = omit,
+ search: str | Omit = omit,
+ # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+ # The extra values given here take precedence over values defined on the client or passed to this method.
+ extra_headers: Headers | None = None,
+ extra_query: Query | None = None,
+ extra_body: Body | None = None,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
+ ) -> CryptoListAvailableResponse:
+ """
+ Obtenha a lista completa de todas as siglas (tickers) de criptomoedas que a API
+ Brapi suporta para consulta no endpoint `/api/v2/crypto`.
+
+ ### Funcionalidade:
+
+ - Retorna um array `coins` com as siglas.
+ - Pode ser filtrado usando o parâmetro `search`. + + ### Autenticação: + + Requer token de autenticação via `token` (query) ou `Authorization` (header). + + ### Exemplo de Requisição: + + **Listar todas as criptomoedas disponíveis:** + + ```bash + curl -X GET "https://brapi.dev/api/v2/crypto/available?token=SEU_TOKEN" + ``` + + **Buscar criptomoedas cujo ticker contenha 'DOGE':** + + ```bash + curl -X GET "https://brapi.dev/api/v2/crypto/available?search=DOGE&token=SEU_TOKEN" + ``` + + ### Resposta: + + A resposta é um objeto JSON com a chave `coins`, contendo um array de strings + com as siglas das criptomoedas (ex: `["BTC", "ETH", "LTC", "XRP"]`). + + Args: + token: **Obrigatório caso não esteja adicionado como header "Authorization".** Seu + token de autenticação pessoal da API Brapi. + + **Formas de Envio:** + + 1. **Query Parameter:** Adicione `?token=SEU_TOKEN` ao final da URL. + 2. **HTTP Header:** Inclua o header `Authorization: Bearer SEU_TOKEN` na sua + requisição. + + Ambos os métodos são aceitos, mas pelo menos um deles deve ser utilizado. + Obtenha seu token em [brapi.dev/dashboard](https://brapi.dev/dashboard). + + search: **Opcional.** Termo para filtrar a lista de siglas de criptomoedas + (correspondência parcial, case-insensitive). Se omitido, retorna todas as + siglas. + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + return self._get( + "/api/v2/crypto/available", + options=make_request_options( + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + query=maybe_transform( + { + "token": token, + "search": search, + }, + crypto_list_available_params.CryptoListAvailableParams, + ), + ), + cast_to=CryptoListAvailableResponse, + ) + + +class AsyncCryptoResource(AsyncAPIResource): + @cached_property + def with_raw_response(self) -> AsyncCryptoResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return + the raw response object instead of the parsed content. + + For more information, see https://www.github.com/brapi-dev/brapi-python#accessing-raw-response-data-eg-headers + """ + return AsyncCryptoResourceWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> AsyncCryptoResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. + + For more information, see https://www.github.com/brapi-dev/brapi-python#with_streaming_response + """ + return AsyncCryptoResourceWithStreamingResponse(self) + + async def retrieve( + self, + *, + coin: str, + token: str | Omit = omit, + currency: str | Omit = omit, + interval: Literal["1m", "2m", "5m", "15m", "30m", "60m", "90m", "1h", "1d", "5d", "1wk", "1mo", "3mo"] + | Omit = omit, + range: Literal["1d", "5d", "1mo", "3mo", "6mo", "1y", "2y", "5y", "10y", "ytd", "max"] | Omit = omit, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. 
+ extra_headers: Headers | None = None,
+ extra_query: Query | None = None,
+ extra_body: Body | None = None,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
+ ) -> CryptoRetrieveResponse:
+ """
+ Obtenha cotações atualizadas e dados históricos para uma ou mais criptomoedas.
+
+ ### Funcionalidades:
+
+ - **Cotação Múltipla:** Consulte várias criptomoedas em uma única requisição
+ usando o parâmetro `coin`.
+ - **Moeda de Referência:** Especifique a moeda fiduciária para a cotação com
+ `currency` (padrão: BRL).
+ - **Dados Históricos:** Solicite séries históricas usando `range` e `interval`
+ (similar ao endpoint de ações).
+
+ ### Autenticação:
+
+ Requer token de autenticação via `token` (query) ou `Authorization` (header).
+
+ ### Exemplo de Requisição:
+
+ **Cotação de Bitcoin (BTC) e Ethereum (ETH) em Dólar Americano (USD):**
+
+ ```bash
+ curl -X GET "https://brapi.dev/api/v2/crypto?coin=BTC,ETH&currency=USD&token=SEU_TOKEN"
+ ```
+
+ **Cotação de Cardano (ADA) em Real (BRL) com histórico do último mês (intervalo
+ diário):**
+
+ ```bash
+ curl -X GET "https://brapi.dev/api/v2/crypto?coin=ADA&currency=BRL&range=1mo&interval=1d&token=SEU_TOKEN"
+ ```
+
+ ### Resposta:
+
+ A resposta contém um array `coins`, onde cada objeto representa uma criptomoeda
+ solicitada, incluindo sua cotação atual, dados de mercado e, opcionalmente, a
+ série histórica (`historicalDataPrice`).
+
+ Args:
+ coin: **Obrigatório.** Uma ou mais siglas (tickers) de criptomoedas que você deseja
+ consultar. Separe múltiplas siglas por vírgula (`,`).
+
+ - **Exemplos:** `BTC`, `ETH,ADA`, `SOL`.
+
+ token: **Obrigatório caso não esteja adicionado como header "Authorization".** Seu
+ token de autenticação pessoal da API Brapi.
+
+ **Formas de Envio:**
+
+ 1. **Query Parameter:** Adicione `?token=SEU_TOKEN` ao final da URL.
+ 2. **HTTP Header:** Inclua o header `Authorization: Bearer SEU_TOKEN` na sua
+ requisição.
+
+ Ambos os métodos são aceitos, mas pelo menos um deles deve ser utilizado.
+ Obtenha seu token em [brapi.dev/dashboard](https://brapi.dev/dashboard).
+
+ currency: **Opcional.** A sigla da moeda fiduciária na qual a cotação da(s) criptomoeda(s)
+ deve ser retornada. Se omitido, o padrão é `BRL` (Real Brasileiro).
+
+ interval: **Opcional.** Define a granularidade (intervalo) dos dados históricos de preço
+ (`historicalDataPrice`). Requer que `range` também seja especificado. Funciona
+ de forma análoga ao endpoint de ações.
+
+ - Valores: `1m`, `2m`, `5m`, `15m`, `30m`, `60m`, `90m`, `1h`, `1d`, `5d`,
+ `1wk`, `1mo`, `3mo`.
+
+ range: **Opcional.** Define o período para os dados históricos de preço
+ (`historicalDataPrice`). Funciona de forma análoga ao endpoint de ações. Se
+ omitido, apenas a cotação mais recente é retornada (a menos que `interval` seja
+ usado).
+
+ - Valores: `1d`, `5d`, `1mo`, `3mo`, `6mo`, `1y`, `2y`, `5y`, `10y`, `ytd`,
+ `max`.
+ + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + return await self._get( + "/api/v2/crypto", + options=make_request_options( + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + query=await async_maybe_transform( + { + "coin": coin, + "token": token, + "currency": currency, + "interval": interval, + "range": range, + }, + crypto_retrieve_params.CryptoRetrieveParams, + ), + ), + cast_to=CryptoRetrieveResponse, + ) + + async def list_available( + self, + *, + token: str | Omit = omit, + search: str | Omit = omit, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> CryptoListAvailableResponse: + """ + Obtenha a lista completa de todas as siglas (tickers) de criptomoedas que a API + Brapi suporta para consulta no endpoint `/api/v2/crypto`. + + ### Funcionalidade: + + - Retorna um array `coins` com as siglas. + - Pode ser filtrado usando o parâmetro `search`. + + ### Autenticação: + + Requer token de autenticação via `token` (query) ou `Authorization` (header). + + ### Exemplo de Requisição: + + **Listar todas as criptomoedas disponíveis:** + + ```bash + curl -X GET "https://brapi.dev/api/v2/crypto/available?token=SEU_TOKEN" + ``` + + **Buscar criptomoedas cujo ticker contenha 'DOGE':** + + ```bash + curl -X GET "https://brapi.dev/api/v2/crypto/available?search=DOGE&token=SEU_TOKEN" + ``` + + ### Resposta: + + A resposta é um objeto JSON com a chave `coins`, contendo um array de strings + com as siglas das criptomoedas (ex: `["BTC", "ETH", "LTC", "XRP"]`). + + Args: + token: **Obrigatório caso não esteja adicionado como header "Authorization".** Seu + token de autenticação pessoal da API Brapi. + + **Formas de Envio:** + + 1. **Query Parameter:** Adicione `?token=SEU_TOKEN` ao final da URL. + 2. **HTTP Header:** Inclua o header `Authorization: Bearer SEU_TOKEN` na sua + requisição. + + Ambos os métodos são aceitos, mas pelo menos um deles deve ser utilizado. + Obtenha seu token em [brapi.dev/dashboard](https://brapi.dev/dashboard). + + search: **Opcional.** Termo para filtrar a lista de siglas de criptomoedas + (correspondência parcial, case-insensitive). Se omitido, retorna todas as + siglas. 
+ + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + return await self._get( + "/api/v2/crypto/available", + options=make_request_options( + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + query=await async_maybe_transform( + { + "token": token, + "search": search, + }, + crypto_list_available_params.CryptoListAvailableParams, + ), + ), + cast_to=CryptoListAvailableResponse, + ) + + +class CryptoResourceWithRawResponse: + def __init__(self, crypto: CryptoResource) -> None: + self._crypto = crypto + + self.retrieve = to_raw_response_wrapper( + crypto.retrieve, + ) + self.list_available = to_raw_response_wrapper( + crypto.list_available, + ) + + +class AsyncCryptoResourceWithRawResponse: + def __init__(self, crypto: AsyncCryptoResource) -> None: + self._crypto = crypto + + self.retrieve = async_to_raw_response_wrapper( + crypto.retrieve, + ) + self.list_available = async_to_raw_response_wrapper( + crypto.list_available, + ) + + +class CryptoResourceWithStreamingResponse: + def __init__(self, crypto: CryptoResource) -> None: + self._crypto = crypto + + self.retrieve = to_streamed_response_wrapper( + crypto.retrieve, + ) + self.list_available = to_streamed_response_wrapper( + crypto.list_available, + ) + + +class AsyncCryptoResourceWithStreamingResponse: + def __init__(self, crypto: AsyncCryptoResource) -> None: + self._crypto = crypto + + self.retrieve = async_to_streamed_response_wrapper( + crypto.retrieve, + ) + self.list_available = async_to_streamed_response_wrapper( + crypto.list_available, + ) diff --git a/src/brapi/resources/v2/currency.py b/src/brapi/resources/v2/currency.py new file mode 100644 index 0000000..2195980 --- /dev/null +++ b/src/brapi/resources/v2/currency.py @@ -0,0 +1,456 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +import httpx + +from ..._types import Body, Omit, Query, Headers, NotGiven, omit, not_given +from ..._utils import maybe_transform, async_maybe_transform +from ..._compat import cached_property +from ...types.v2 import currency_retrieve_params, currency_list_available_params +from ..._resource import SyncAPIResource, AsyncAPIResource +from ..._response import ( + to_raw_response_wrapper, + to_streamed_response_wrapper, + async_to_raw_response_wrapper, + async_to_streamed_response_wrapper, +) +from ..._base_client import make_request_options +from ...types.v2.currency_retrieve_response import CurrencyRetrieveResponse +from ...types.v2.currency_list_available_response import CurrencyListAvailableResponse + +__all__ = ["CurrencyResource", "AsyncCurrencyResource"] + + +class CurrencyResource(SyncAPIResource): + @cached_property + def with_raw_response(self) -> CurrencyResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return + the raw response object instead of the parsed content. + + For more information, see https://www.github.com/brapi-dev/brapi-python#accessing-raw-response-data-eg-headers + """ + return CurrencyResourceWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> CurrencyResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. 
+ + For more information, see https://www.github.com/brapi-dev/brapi-python#with_streaming_response + """ + return CurrencyResourceWithStreamingResponse(self) + + def retrieve( + self, + *, + currency: str, + token: str | Omit = omit, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> CurrencyRetrieveResponse: + """ + Obtenha cotações atualizadas para um ou mais pares de moedas fiduciárias (ex: + USD-BRL, EUR-USD). + + ### Funcionalidades: + + - **Cotação Múltipla:** Consulte vários pares de moedas em uma única requisição + usando o parâmetro `currency`. + - **Dados Retornados:** Inclui nome do par, preços de compra (bid) e venda + (ask), variação, máximas e mínimas, e timestamp da atualização. + + ### Parâmetros: + + - **`currency` (Obrigatório):** Uma lista de pares de moedas separados por + vírgula, no formato `MOEDA_ORIGEM-MOEDA_DESTINO` (ex: `USD-BRL`, `EUR-USD`). + Consulte os pares disponíveis em + [`/api/v2/currency/available`](#/Moedas/getAvailableCurrencies). + - **`token` (Obrigatório):** Seu token de autenticação. + + ### Autenticação: + + Requer token de autenticação válido via `token` (query) ou `Authorization` + (header). + + Args: + currency: **Obrigatório.** Uma lista de um ou mais pares de moedas a serem consultados, + separados por vírgula (`,`). + + - **Formato:** `MOEDA_ORIGEM-MOEDA_DESTINO` (ex: `USD-BRL`). + - **Disponibilidade:** Consulte os pares válidos usando o endpoint + [`/api/v2/currency/available`](#/Moedas/getAvailableCurrencies). + - **Exemplo:** `USD-BRL,EUR-BRL,BTC-BRL` + + token: **Obrigatório caso não esteja adicionado como header "Authorization".** Seu + token de autenticação pessoal da API Brapi. + + **Formas de Envio:** + + 1. **Query Parameter:** Adicione `?token=SEU_TOKEN` ao final da URL. + 2. **HTTP Header:** Inclua o header `Authorization: Bearer SEU_TOKEN` na sua + requisição. + + Ambos os métodos são aceitos, mas pelo menos um deles deve ser utilizado. + Obtenha seu token em [brapi.dev/dashboard](https://brapi.dev/dashboard). + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + return self._get( + "/api/v2/currency", + options=make_request_options( + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + query=maybe_transform( + { + "currency": currency, + "token": token, + }, + currency_retrieve_params.CurrencyRetrieveParams, + ), + ), + cast_to=CurrencyRetrieveResponse, + ) + + def list_available( + self, + *, + token: str | Omit = omit, + search: str | Omit = omit, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. 
+ extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> CurrencyListAvailableResponse: + """ + Obtenha a lista completa de todas as moedas fiduciárias suportadas pela API, + geralmente utilizadas no parâmetro `currency` de outros endpoints (como o de + criptomoedas) ou para futuras funcionalidades de conversão. + + ### Funcionalidade: + + - Retorna um array `currencies` com os nomes das moedas. + - Pode ser filtrado usando o parâmetro `search`. + + ### Autenticação: + + Requer token de autenticação via `token` (query) ou `Authorization` (header). + + ### Exemplo de Requisição: + + **Listar todas as moedas disponíveis:** + + ```bash + curl -X GET "https://brapi.dev/api/v2/currency/available?token=SEU_TOKEN" + ``` + + **Buscar moedas cujo nome contenha 'Euro':** + + ```bash + curl -X GET "https://brapi.dev/api/v2/currency/available?search=Euro&token=SEU_TOKEN" + ``` + + ### Resposta: + + A resposta é um objeto JSON com a chave `currencies`, contendo um array de + objetos. Cada objeto possui uma chave `currency` com o nome completo da moeda + (ex: `"Dólar Americano/Real Brasileiro"`). **Nota:** O formato do nome pode + indicar um par de moedas, dependendo do contexto interno da API. + + Args: + token: **Obrigatório caso não esteja adicionado como header "Authorization".** Seu + token de autenticação pessoal da API Brapi. + + **Formas de Envio:** + + 1. **Query Parameter:** Adicione `?token=SEU_TOKEN` ao final da URL. + 2. **HTTP Header:** Inclua o header `Authorization: Bearer SEU_TOKEN` na sua + requisição. + + Ambos os métodos são aceitos, mas pelo menos um deles deve ser utilizado. + Obtenha seu token em [brapi.dev/dashboard](https://brapi.dev/dashboard). + + search: **Opcional.** Termo para filtrar a lista pelo nome da moeda (correspondência + parcial, case-insensitive). + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + return self._get( + "/api/v2/currency/available", + options=make_request_options( + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + query=maybe_transform( + { + "token": token, + "search": search, + }, + currency_list_available_params.CurrencyListAvailableParams, + ), + ), + cast_to=CurrencyListAvailableResponse, + ) + + +class AsyncCurrencyResource(AsyncAPIResource): + @cached_property + def with_raw_response(self) -> AsyncCurrencyResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return + the raw response object instead of the parsed content. + + For more information, see https://www.github.com/brapi-dev/brapi-python#accessing-raw-response-data-eg-headers + """ + return AsyncCurrencyResourceWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> AsyncCurrencyResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. 
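A sketch of the deferred-read pattern, assuming the client is exported as `Brapi` and that the wrapped response exposes the usual `.headers` and `.parse()` accessors of these generated SDKs:

```python
from brapi import Brapi  # assumed client class name

client = Brapi()
with client.v2.currency.with_streaming_response.retrieve(
    currency="USD-BRL",
    token="SEU_TOKEN",
) as response:
    print(response.headers.get("content-type"))
    data = response.parse()  # body is read and parsed only when requested
```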
+ + For more information, see https://www.github.com/brapi-dev/brapi-python#with_streaming_response + """ + return AsyncCurrencyResourceWithStreamingResponse(self) + + async def retrieve( + self, + *, + currency: str, + token: str | Omit = omit, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> CurrencyRetrieveResponse: + """ + Obtenha cotações atualizadas para um ou mais pares de moedas fiduciárias (ex: + USD-BRL, EUR-USD). + + ### Funcionalidades: + + - **Cotação Múltipla:** Consulte vários pares de moedas em uma única requisição + usando o parâmetro `currency`. + - **Dados Retornados:** Inclui nome do par, preços de compra (bid) e venda + (ask), variação, máximas e mínimas, e timestamp da atualização. + + ### Parâmetros: + + - **`currency` (Obrigatório):** Uma lista de pares de moedas separados por + vírgula, no formato `MOEDA_ORIGEM-MOEDA_DESTINO` (ex: `USD-BRL`, `EUR-USD`). + Consulte os pares disponíveis em + [`/api/v2/currency/available`](#/Moedas/getAvailableCurrencies). + - **`token` (Obrigatório):** Seu token de autenticação. + + ### Autenticação: + + Requer token de autenticação válido via `token` (query) ou `Authorization` + (header). + + Args: + currency: **Obrigatório.** Uma lista de um ou mais pares de moedas a serem consultados, + separados por vírgula (`,`). + + - **Formato:** `MOEDA_ORIGEM-MOEDA_DESTINO` (ex: `USD-BRL`). + - **Disponibilidade:** Consulte os pares válidos usando o endpoint + [`/api/v2/currency/available`](#/Moedas/getAvailableCurrencies). + - **Exemplo:** `USD-BRL,EUR-BRL,BTC-BRL` + + token: **Obrigatório caso não esteja adicionado como header "Authorization".** Seu + token de autenticação pessoal da API Brapi. + + **Formas de Envio:** + + 1. **Query Parameter:** Adicione `?token=SEU_TOKEN` ao final da URL. + 2. **HTTP Header:** Inclua o header `Authorization: Bearer SEU_TOKEN` na sua + requisição. + + Ambos os métodos são aceitos, mas pelo menos um deles deve ser utilizado. + Obtenha seu token em [brapi.dev/dashboard](https://brapi.dev/dashboard). + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + return await self._get( + "/api/v2/currency", + options=make_request_options( + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + query=await async_maybe_transform( + { + "currency": currency, + "token": token, + }, + currency_retrieve_params.CurrencyRetrieveParams, + ), + ), + cast_to=CurrencyRetrieveResponse, + ) + + async def list_available( + self, + *, + token: str | Omit = omit, + search: str | Omit = omit, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. 
+ extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> CurrencyListAvailableResponse: + """ + Obtenha a lista completa de todas as moedas fiduciárias suportadas pela API, + geralmente utilizadas no parâmetro `currency` de outros endpoints (como o de + criptomoedas) ou para futuras funcionalidades de conversão. + + ### Funcionalidade: + + - Retorna um array `currencies` com os nomes das moedas. + - Pode ser filtrado usando o parâmetro `search`. + + ### Autenticação: + + Requer token de autenticação via `token` (query) ou `Authorization` (header). + + ### Exemplo de Requisição: + + **Listar todas as moedas disponíveis:** + + ```bash + curl -X GET "https://brapi.dev/api/v2/currency/available?token=SEU_TOKEN" + ``` + + **Buscar moedas cujo nome contenha 'Euro':** + + ```bash + curl -X GET "https://brapi.dev/api/v2/currency/available?search=Euro&token=SEU_TOKEN" + ``` + + ### Resposta: + + A resposta é um objeto JSON com a chave `currencies`, contendo um array de + objetos. Cada objeto possui uma chave `currency` com o nome completo da moeda + (ex: `"Dólar Americano/Real Brasileiro"`). **Nota:** O formato do nome pode + indicar um par de moedas, dependendo do contexto interno da API. + + Args: + token: **Obrigatório caso não esteja adicionado como header "Authorization".** Seu + token de autenticação pessoal da API Brapi. + + **Formas de Envio:** + + 1. **Query Parameter:** Adicione `?token=SEU_TOKEN` ao final da URL. + 2. **HTTP Header:** Inclua o header `Authorization: Bearer SEU_TOKEN` na sua + requisição. + + Ambos os métodos são aceitos, mas pelo menos um deles deve ser utilizado. + Obtenha seu token em [brapi.dev/dashboard](https://brapi.dev/dashboard). + + search: **Opcional.** Termo para filtrar a lista pelo nome da moeda (correspondência + parcial, case-insensitive). 
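These listing parameters complement the quote endpoint above; a sketch of a multi-pair quote through the generated client (class name and `client.v2.currency` path assumed):

```python
from brapi import Brapi  # assumed client class name

client = Brapi()
# Several fiat pairs in one request, in SOURCE-TARGET format.
fx = client.v2.currency.retrieve(currency="USD-BRL,EUR-BRL", token="SEU_TOKEN")
print(fx)
```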
+ + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + return await self._get( + "/api/v2/currency/available", + options=make_request_options( + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + query=await async_maybe_transform( + { + "token": token, + "search": search, + }, + currency_list_available_params.CurrencyListAvailableParams, + ), + ), + cast_to=CurrencyListAvailableResponse, + ) + + +class CurrencyResourceWithRawResponse: + def __init__(self, currency: CurrencyResource) -> None: + self._currency = currency + + self.retrieve = to_raw_response_wrapper( + currency.retrieve, + ) + self.list_available = to_raw_response_wrapper( + currency.list_available, + ) + + +class AsyncCurrencyResourceWithRawResponse: + def __init__(self, currency: AsyncCurrencyResource) -> None: + self._currency = currency + + self.retrieve = async_to_raw_response_wrapper( + currency.retrieve, + ) + self.list_available = async_to_raw_response_wrapper( + currency.list_available, + ) + + +class CurrencyResourceWithStreamingResponse: + def __init__(self, currency: CurrencyResource) -> None: + self._currency = currency + + self.retrieve = to_streamed_response_wrapper( + currency.retrieve, + ) + self.list_available = to_streamed_response_wrapper( + currency.list_available, + ) + + +class AsyncCurrencyResourceWithStreamingResponse: + def __init__(self, currency: AsyncCurrencyResource) -> None: + self._currency = currency + + self.retrieve = async_to_streamed_response_wrapper( + currency.retrieve, + ) + self.list_available = async_to_streamed_response_wrapper( + currency.list_available, + ) diff --git a/src/brapi/resources/v2/inflation.py b/src/brapi/resources/v2/inflation.py new file mode 100644 index 0000000..7098b22 --- /dev/null +++ b/src/brapi/resources/v2/inflation.py @@ -0,0 +1,530 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing import Union +from datetime import date +from typing_extensions import Literal + +import httpx + +from ..._types import Body, Omit, Query, Headers, NotGiven, omit, not_given +from ..._utils import maybe_transform, async_maybe_transform +from ..._compat import cached_property +from ...types.v2 import inflation_retrieve_params, inflation_list_available_params +from ..._resource import SyncAPIResource, AsyncAPIResource +from ..._response import ( + to_raw_response_wrapper, + to_streamed_response_wrapper, + async_to_raw_response_wrapper, + async_to_streamed_response_wrapper, +) +from ..._base_client import make_request_options +from ...types.v2.inflation_retrieve_response import InflationRetrieveResponse +from ...types.v2.inflation_list_available_response import InflationListAvailableResponse + +__all__ = ["InflationResource", "AsyncInflationResource"] + + +class InflationResource(SyncAPIResource): + @cached_property + def with_raw_response(self) -> InflationResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return + the raw response object instead of the parsed content. 
+ + For more information, see https://www.github.com/brapi-dev/brapi-python#accessing-raw-response-data-eg-headers + """ + return InflationResourceWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> InflationResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. + + For more information, see https://www.github.com/brapi-dev/brapi-python#with_streaming_response + """ + return InflationResourceWithStreamingResponse(self) + + def retrieve( + self, + *, + token: str | Omit = omit, + country: str | Omit = omit, + end: Union[str, date] | Omit = omit, + historical: bool | Omit = omit, + sort_by: Literal["date", "value"] | Omit = omit, + sort_order: Literal["asc", "desc"] | Omit = omit, + start: Union[str, date] | Omit = omit, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> InflationRetrieveResponse: + """ + Obtenha dados históricos sobre índices de inflação para um país específico. + + ### Funcionalidades: + + - **Seleção de País:** Especifique o país desejado com o parâmetro `country` + (padrão: `brazil`). + - **Filtragem por Período:** Defina um intervalo de datas com `start` e `end` + (formato DD/MM/YYYY). + - **Inclusão de Histórico:** O parâmetro `historical` (booleano) parece + controlar a inclusão de dados históricos (verificar comportamento exato, pode + ser redundante com `start`/`end`). + - **Ordenação:** Ordene os resultados por data (`date`) ou valor (`value`) + usando `sortBy` e `sortOrder`. + + ### Autenticação: + + Requer token de autenticação via `token` (query) ou `Authorization` (header). + + ### Exemplo de Requisição: + + **Buscar dados de inflação do Brasil para o ano de 2022, ordenados por valor + ascendente:** + + ```bash + curl -X GET "https://brapi.dev/api/v2/inflation?country=brazil&start=01/01/2022&end=31/12/2022&sortBy=value&sortOrder=asc&token=SEU_TOKEN" + ``` + + **Buscar os dados mais recentes de inflação (sem período definido, ordenação + padrão):** + + ```bash + curl -X GET "https://brapi.dev/api/v2/inflation?country=brazil&token=SEU_TOKEN" + ``` + + ### Resposta: + + A resposta contém um array `inflation`, onde cada objeto representa um ponto de + dado de inflação com sua `date` (DD/MM/YYYY), `value` (o índice de inflação como + string) e `epochDate` (timestamp UNIX). + + Args: + token: **Obrigatório caso não esteja adicionado como header "Authorization".** Seu + token de autenticação pessoal da API Brapi. + + **Formas de Envio:** + + 1. **Query Parameter:** Adicione `?token=SEU_TOKEN` ao final da URL. + 2. **HTTP Header:** Inclua o header `Authorization: Bearer SEU_TOKEN` na sua + requisição. + + Ambos os métodos são aceitos, mas pelo menos um deles deve ser utilizado. + Obtenha seu token em [brapi.dev/dashboard](https://brapi.dev/dashboard). + + country: **Opcional.** Nome do país para o qual buscar os dados de inflação. Use nomes em + minúsculas. O padrão é `brazil`. Consulte `/api/v2/inflation/available` para a + lista de países suportados. + + end: **Opcional.** Data final do período desejado para os dados históricos, no + formato `DD/MM/YYYY`. Requerido se `start` for especificado. 
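Taken together, these parameters map directly onto keyword arguments of this method; a usage sketch (the `Brapi` class name and the `client.v2.inflation` accessor are assumptions):

```python
from brapi import Brapi  # assumed client class name

client = Brapi()
# Brazilian inflation for 2022, sorted by value ascending (dates are DD/MM/YYYY).
data = client.v2.inflation.retrieve(
    country="brazil",
    start="01/01/2022",
    end="31/12/2022",
    sort_by="value",
    sort_order="asc",
    token="SEU_TOKEN",
)
print(data.inflation)
```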
+ + historical: **Opcional.** Booleano (`true` ou `false`). Define se dados históricos devem ser + incluídos. O comportamento exato em conjunto com `start`/`end` deve ser + verificado. Padrão: `false`. + + sort_by: **Opcional.** Campo pelo qual os resultados da inflação serão ordenados. + + sort_order: **Opcional.** Direção da ordenação: `asc` (ascendente) ou `desc` (descendente). + Padrão: `desc`. Requer que `sortBy` seja especificado. + + start: **Opcional.** Data de início do período desejado para os dados históricos, no + formato `DD/MM/YYYY`. Requerido se `end` for especificado. + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + return self._get( + "/api/v2/inflation", + options=make_request_options( + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + query=maybe_transform( + { + "token": token, + "country": country, + "end": end, + "historical": historical, + "sort_by": sort_by, + "sort_order": sort_order, + "start": start, + }, + inflation_retrieve_params.InflationRetrieveParams, + ), + ), + cast_to=InflationRetrieveResponse, + ) + + def list_available( + self, + *, + token: str | Omit = omit, + search: str | Omit = omit, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> InflationListAvailableResponse: + """ + Obtenha a lista completa de todos os países para os quais a API Brapi possui + dados de inflação disponíveis para consulta no endpoint `/api/v2/inflation`. + + ### Funcionalidade: + + - Retorna um array `countries` com os nomes dos países (em minúsculas). + - Pode ser filtrado usando o parâmetro `search`. + + ### Autenticação: + + Requer token de autenticação via `token` (query) ou `Authorization` (header). + + ### Exemplo de Requisição: + + **Listar todos os países com dados de inflação:** + + ```bash + curl -X GET "https://brapi.dev/api/v2/inflation/available?token=SEU_TOKEN" + ``` + + **Buscar países cujo nome contenha 'arg':** + + ```bash + curl -X GET "https://brapi.dev/api/v2/inflation/available?search=arg&token=SEU_TOKEN" + ``` + + ### Resposta: + + A resposta é um objeto JSON com a chave `countries`, contendo um array de + strings com os nomes dos países (ex: `["brazil", "argentina", "usa"]`). + + Args: + token: **Obrigatório caso não esteja adicionado como header "Authorization".** Seu + token de autenticação pessoal da API Brapi. + + **Formas de Envio:** + + 1. **Query Parameter:** Adicione `?token=SEU_TOKEN` ao final da URL. + 2. **HTTP Header:** Inclua o header `Authorization: Bearer SEU_TOKEN` na sua + requisição. + + Ambos os métodos são aceitos, mas pelo menos um deles deve ser utilizado. + Obtenha seu token em [brapi.dev/dashboard](https://brapi.dev/dashboard). + + search: **Opcional.** Termo para filtrar a lista pelo nome do país (correspondência + parcial, case-insensitive). Se omitido, retorna todos os países. 
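A sketch of pairing this discovery call with the main endpoint (client class name assumed):

```python
from brapi import Brapi  # assumed client class name

client = Brapi()
countries = client.v2.inflation.list_available(search="arg", token="SEU_TOKEN")
if countries.countries:
    # Feed the first match back into /api/v2/inflation.
    print(client.v2.inflation.retrieve(country=countries.countries[0], token="SEU_TOKEN"))
```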
+ + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + return self._get( + "/api/v2/inflation/available", + options=make_request_options( + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + query=maybe_transform( + { + "token": token, + "search": search, + }, + inflation_list_available_params.InflationListAvailableParams, + ), + ), + cast_to=InflationListAvailableResponse, + ) + + +class AsyncInflationResource(AsyncAPIResource): + @cached_property + def with_raw_response(self) -> AsyncInflationResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return + the raw response object instead of the parsed content. + + For more information, see https://www.github.com/brapi-dev/brapi-python#accessing-raw-response-data-eg-headers + """ + return AsyncInflationResourceWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> AsyncInflationResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. + + For more information, see https://www.github.com/brapi-dev/brapi-python#with_streaming_response + """ + return AsyncInflationResourceWithStreamingResponse(self) + + async def retrieve( + self, + *, + token: str | Omit = omit, + country: str | Omit = omit, + end: Union[str, date] | Omit = omit, + historical: bool | Omit = omit, + sort_by: Literal["date", "value"] | Omit = omit, + sort_order: Literal["asc", "desc"] | Omit = omit, + start: Union[str, date] | Omit = omit, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> InflationRetrieveResponse: + """ + Obtenha dados históricos sobre índices de inflação para um país específico. + + ### Funcionalidades: + + - **Seleção de País:** Especifique o país desejado com o parâmetro `country` + (padrão: `brazil`). + - **Filtragem por Período:** Defina um intervalo de datas com `start` e `end` + (formato DD/MM/YYYY). + - **Inclusão de Histórico:** O parâmetro `historical` (booleano) parece + controlar a inclusão de dados históricos (verificar comportamento exato, pode + ser redundante com `start`/`end`). + - **Ordenação:** Ordene os resultados por data (`date`) ou valor (`value`) + usando `sortBy` e `sortOrder`. + + ### Autenticação: + + Requer token de autenticação via `token` (query) ou `Authorization` (header). 
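Since the token may travel as an `Authorization` header instead of a query parameter, a sketch of doing so per request via the documented `extra_headers` argument (the `AsyncBrapi` class name and `client.v2.inflation` path are assumptions):

```python
import asyncio

from brapi import AsyncBrapi  # assumed client class name


async def main() -> None:
    client = AsyncBrapi()
    # Token sent as a Bearer header instead of the `token` query parameter.
    data = await client.v2.inflation.retrieve(
        country="brazil",
        extra_headers={"Authorization": "Bearer SEU_TOKEN"},
    )
    print(data.inflation)


asyncio.run(main())
```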
+ + ### Exemplo de Requisição: + + **Buscar dados de inflação do Brasil para o ano de 2022, ordenados por valor + ascendente:** + + ```bash + curl -X GET "https://brapi.dev/api/v2/inflation?country=brazil&start=01/01/2022&end=31/12/2022&sortBy=value&sortOrder=asc&token=SEU_TOKEN" + ``` + + **Buscar os dados mais recentes de inflação (sem período definido, ordenação + padrão):** + + ```bash + curl -X GET "https://brapi.dev/api/v2/inflation?country=brazil&token=SEU_TOKEN" + ``` + + ### Resposta: + + A resposta contém um array `inflation`, onde cada objeto representa um ponto de + dado de inflação com sua `date` (DD/MM/YYYY), `value` (o índice de inflação como + string) e `epochDate` (timestamp UNIX). + + Args: + token: **Obrigatório caso não esteja adicionado como header "Authorization".** Seu + token de autenticação pessoal da API Brapi. + + **Formas de Envio:** + + 1. **Query Parameter:** Adicione `?token=SEU_TOKEN` ao final da URL. + 2. **HTTP Header:** Inclua o header `Authorization: Bearer SEU_TOKEN` na sua + requisição. + + Ambos os métodos são aceitos, mas pelo menos um deles deve ser utilizado. + Obtenha seu token em [brapi.dev/dashboard](https://brapi.dev/dashboard). + + country: **Opcional.** Nome do país para o qual buscar os dados de inflação. Use nomes em + minúsculas. O padrão é `brazil`. Consulte `/api/v2/inflation/available` para a + lista de países suportados. + + end: **Opcional.** Data final do período desejado para os dados históricos, no + formato `DD/MM/YYYY`. Requerido se `start` for especificado. + + historical: **Opcional.** Booleano (`true` ou `false`). Define se dados históricos devem ser + incluídos. O comportamento exato em conjunto com `start`/`end` deve ser + verificado. Padrão: `false`. + + sort_by: **Opcional.** Campo pelo qual os resultados da inflação serão ordenados. + + sort_order: **Opcional.** Direção da ordenação: `asc` (ascendente) ou `desc` (descendente). + Padrão: `desc`. Requer que `sortBy` seja especificado. + + start: **Opcional.** Data de início do período desejado para os dados históricos, no + formato `DD/MM/YYYY`. Requerido se `end` for especificado. + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + return await self._get( + "/api/v2/inflation", + options=make_request_options( + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + query=await async_maybe_transform( + { + "token": token, + "country": country, + "end": end, + "historical": historical, + "sort_by": sort_by, + "sort_order": sort_order, + "start": start, + }, + inflation_retrieve_params.InflationRetrieveParams, + ), + ), + cast_to=InflationRetrieveResponse, + ) + + async def list_available( + self, + *, + token: str | Omit = omit, + search: str | Omit = omit, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. 
+ extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> InflationListAvailableResponse: + """ + Obtenha a lista completa de todos os países para os quais a API Brapi possui + dados de inflação disponíveis para consulta no endpoint `/api/v2/inflation`. + + ### Funcionalidade: + + - Retorna um array `countries` com os nomes dos países (em minúsculas). + - Pode ser filtrado usando o parâmetro `search`. + + ### Autenticação: + + Requer token de autenticação via `token` (query) ou `Authorization` (header). + + ### Exemplo de Requisição: + + **Listar todos os países com dados de inflação:** + + ```bash + curl -X GET "https://brapi.dev/api/v2/inflation/available?token=SEU_TOKEN" + ``` + + **Buscar países cujo nome contenha 'arg':** + + ```bash + curl -X GET "https://brapi.dev/api/v2/inflation/available?search=arg&token=SEU_TOKEN" + ``` + + ### Resposta: + + A resposta é um objeto JSON com a chave `countries`, contendo um array de + strings com os nomes dos países (ex: `["brazil", "argentina", "usa"]`). + + Args: + token: **Obrigatório caso não esteja adicionado como header "Authorization".** Seu + token de autenticação pessoal da API Brapi. + + **Formas de Envio:** + + 1. **Query Parameter:** Adicione `?token=SEU_TOKEN` ao final da URL. + 2. **HTTP Header:** Inclua o header `Authorization: Bearer SEU_TOKEN` na sua + requisição. + + Ambos os métodos são aceitos, mas pelo menos um deles deve ser utilizado. + Obtenha seu token em [brapi.dev/dashboard](https://brapi.dev/dashboard). + + search: **Opcional.** Termo para filtrar a lista pelo nome do país (correspondência + parcial, case-insensitive). Se omitido, retorna todos os países. + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + return await self._get( + "/api/v2/inflation/available", + options=make_request_options( + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + query=await async_maybe_transform( + { + "token": token, + "search": search, + }, + inflation_list_available_params.InflationListAvailableParams, + ), + ), + cast_to=InflationListAvailableResponse, + ) + + +class InflationResourceWithRawResponse: + def __init__(self, inflation: InflationResource) -> None: + self._inflation = inflation + + self.retrieve = to_raw_response_wrapper( + inflation.retrieve, + ) + self.list_available = to_raw_response_wrapper( + inflation.list_available, + ) + + +class AsyncInflationResourceWithRawResponse: + def __init__(self, inflation: AsyncInflationResource) -> None: + self._inflation = inflation + + self.retrieve = async_to_raw_response_wrapper( + inflation.retrieve, + ) + self.list_available = async_to_raw_response_wrapper( + inflation.list_available, + ) + + +class InflationResourceWithStreamingResponse: + def __init__(self, inflation: InflationResource) -> None: + self._inflation = inflation + + self.retrieve = to_streamed_response_wrapper( + inflation.retrieve, + ) + self.list_available = to_streamed_response_wrapper( + inflation.list_available, + ) + + +class AsyncInflationResourceWithStreamingResponse: + def __init__(self, inflation: AsyncInflationResource) -> None: + self._inflation = inflation + + self.retrieve = async_to_streamed_response_wrapper( + inflation.retrieve, 
+ ) + self.list_available = async_to_streamed_response_wrapper( + inflation.list_available, + ) diff --git a/src/brapi/resources/v2/prime_rate.py b/src/brapi/resources/v2/prime_rate.py new file mode 100644 index 0000000..0dedc9b --- /dev/null +++ b/src/brapi/resources/v2/prime_rate.py @@ -0,0 +1,490 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing import Union +from datetime import date +from typing_extensions import Literal + +import httpx + +from ..._types import Body, Omit, Query, Headers, NotGiven, omit, not_given +from ..._utils import maybe_transform, async_maybe_transform +from ..._compat import cached_property +from ...types.v2 import prime_rate_retrieve_params, prime_rate_list_available_params +from ..._resource import SyncAPIResource, AsyncAPIResource +from ..._response import ( + to_raw_response_wrapper, + to_streamed_response_wrapper, + async_to_raw_response_wrapper, + async_to_streamed_response_wrapper, +) +from ..._base_client import make_request_options +from ...types.v2.prime_rate_retrieve_response import PrimeRateRetrieveResponse +from ...types.v2.prime_rate_list_available_response import PrimeRateListAvailableResponse + +__all__ = ["PrimeRateResource", "AsyncPrimeRateResource"] + + +class PrimeRateResource(SyncAPIResource): + @cached_property + def with_raw_response(self) -> PrimeRateResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return + the raw response object instead of the parsed content. + + For more information, see https://www.github.com/brapi-dev/brapi-python#accessing-raw-response-data-eg-headers + """ + return PrimeRateResourceWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> PrimeRateResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. + + For more information, see https://www.github.com/brapi-dev/brapi-python#with_streaming_response + """ + return PrimeRateResourceWithStreamingResponse(self) + + def retrieve( + self, + *, + token: str | Omit = omit, + country: str | Omit = omit, + end: Union[str, date] | Omit = omit, + historical: bool | Omit = omit, + sort_by: Literal["date", "value"] | Omit = omit, + sort_order: Literal["asc", "desc"] | Omit = omit, + start: Union[str, date] | Omit = omit, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> PrimeRateRetrieveResponse: + """ + Obtenha informações atualizadas sobre a taxa básica de juros (SELIC) de um país + por um período determinado. + + ### Funcionalidades: + + - **Seleção por País:** Especifique o país desejado usando o parâmetro `country` + (padrão: brazil). + - **Período Customizado:** Defina datas de início e fim com `start` e `end` para + consultar um intervalo específico. + - **Ordenação:** Ordene os resultados por data ou valor com os parâmetros + `sortBy` e `sortOrder`. + - **Dados Históricos:** Solicite o histórico completo ou apenas o valor mais + recente com o parâmetro `historical`. + + ### Autenticação: + + Requer token de autenticação via `token` (query) ou `Authorization` (header). 
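The same request can be issued through the generated Python client; a sketch, assuming the client class is exported as `Brapi` and mounts this resource at `client.v2.prime_rate`:

```python
from brapi import Brapi  # assumed client class name

client = Brapi()
# Full SELIC series for the window, newest entries first.
selic = client.v2.prime_rate.retrieve(
    country="brazil",
    historical=True,
    start="01/12/2021",
    end="01/01/2022",
    sort_by="date",
    sort_order="desc",
    token="SEU_TOKEN",
)
print(selic)
```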
+ + ### Exemplo de Requisição: + + **Taxa de juros do Brasil entre dezembro/2021 e janeiro/2022:** + + ```bash + curl -X GET "https://brapi.dev/api/v2/prime-rate?country=brazil&start=01/12/2021&end=01/01/2022&sortBy=date&sortOrder=desc&token=SEU_TOKEN" + ``` + + Args: + token: **Obrigatório caso não esteja adicionado como header "Authorization".** Seu + token de autenticação pessoal da API Brapi. + + **Formas de Envio:** + + 1. **Query Parameter:** Adicione `?token=SEU_TOKEN` ao final da URL. + 2. **HTTP Header:** Inclua o header `Authorization: Bearer SEU_TOKEN` na sua + requisição. + + Ambos os métodos são aceitos, mas pelo menos um deles deve ser utilizado. + Obtenha seu token em [brapi.dev/dashboard](https://brapi.dev/dashboard). + + country: **Opcional.** O país do qual você deseja obter informações sobre a taxa básica + de juros. Por padrão, o país é definido como brazil. Você pode consultar a lista + de países disponíveis através do endpoint `/api/v2/prime-rate/available`. + + end: **Opcional.** Data final do período para busca no formato DD/MM/YYYY. Por padrão + é a data atual. Útil quando `historical=true` para restringir o período da série + histórica. + + historical: **Opcional.** Define se os dados históricos serão retornados. Se definido como + `true`, retorna a série histórica completa. Se `false` (padrão) ou omitido, + retorna apenas o valor mais recente. + + sort_by: **Opcional.** Campo pelo qual os resultados serão ordenados. Por padrão, ordena + por `date` (data). + + sort_order: **Opcional.** Define se a ordenação será crescente (`asc`) ou decrescente + (`desc`). Por padrão, é `desc` (decrescente). + + start: **Opcional.** Data inicial do período para busca no formato DD/MM/YYYY. Útil + quando `historical=true` para restringir o período da série histórica. + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + return self._get( + "/api/v2/prime-rate", + options=make_request_options( + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + query=maybe_transform( + { + "token": token, + "country": country, + "end": end, + "historical": historical, + "sort_by": sort_by, + "sort_order": sort_order, + "start": start, + }, + prime_rate_retrieve_params.PrimeRateRetrieveParams, + ), + ), + cast_to=PrimeRateRetrieveResponse, + ) + + def list_available( + self, + *, + token: str | Omit = omit, + search: str | Omit = omit, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> PrimeRateListAvailableResponse: + """ + Liste todos os países disponíveis com dados de taxa básica de juros (SELIC) na + API brapi. Este endpoint facilita a descoberta de quais países possuem dados + disponíveis para consulta através do endpoint principal `/api/v2/prime-rate`. + + ### Funcionalidades: + + - **Busca Filtrada:** Utilize o parâmetro `search` para filtrar países por nome + ou parte do nome. 
+ - **Ideal para Autocomplete:** Perfeito para implementar campos de busca com + autocompletar em interfaces de usuário. + + ### Autenticação: + + Requer token de autenticação via `token` (query) ou `Authorization` (header). + + ### Exemplo de Requisição: + + **Listar países que contenham "BR" no nome:** + + ```bash + curl -X GET "https://brapi.dev/api/v2/prime-rate/available?search=BR&token=SEU_TOKEN" + ``` + + Args: + token: **Obrigatório caso não esteja adicionado como header "Authorization".** Seu + token de autenticação pessoal da API Brapi. + + **Formas de Envio:** + + 1. **Query Parameter:** Adicione `?token=SEU_TOKEN` ao final da URL. + 2. **HTTP Header:** Inclua o header `Authorization: Bearer SEU_TOKEN` na sua + requisição. + + Ambos os métodos são aceitos, mas pelo menos um deles deve ser utilizado. + Obtenha seu token em [brapi.dev/dashboard](https://brapi.dev/dashboard). + + search: **Opcional.** Termo para filtrar a lista de países por nome. Retorna países + cujos nomes contenham o termo especificado (case insensitive). + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + return self._get( + "/api/v2/prime-rate/available", + options=make_request_options( + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + query=maybe_transform( + { + "token": token, + "search": search, + }, + prime_rate_list_available_params.PrimeRateListAvailableParams, + ), + ), + cast_to=PrimeRateListAvailableResponse, + ) + + +class AsyncPrimeRateResource(AsyncAPIResource): + @cached_property + def with_raw_response(self) -> AsyncPrimeRateResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return + the raw response object instead of the parsed content. + + For more information, see https://www.github.com/brapi-dev/brapi-python#accessing-raw-response-data-eg-headers + """ + return AsyncPrimeRateResourceWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> AsyncPrimeRateResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. + + For more information, see https://www.github.com/brapi-dev/brapi-python#with_streaming_response + """ + return AsyncPrimeRateResourceWithStreamingResponse(self) + + async def retrieve( + self, + *, + token: str | Omit = omit, + country: str | Omit = omit, + end: Union[str, date] | Omit = omit, + historical: bool | Omit = omit, + sort_by: Literal["date", "value"] | Omit = omit, + sort_order: Literal["asc", "desc"] | Omit = omit, + start: Union[str, date] | Omit = omit, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> PrimeRateRetrieveResponse: + """ + Obtenha informações atualizadas sobre a taxa básica de juros (SELIC) de um país + por um período determinado. + + ### Funcionalidades: + + - **Seleção por País:** Especifique o país desejado usando o parâmetro `country` + (padrão: brazil). 
+ - **Período Customizado:** Defina datas de início e fim com `start` e `end` para + consultar um intervalo específico. + - **Ordenação:** Ordene os resultados por data ou valor com os parâmetros + `sortBy` e `sortOrder`. + - **Dados Históricos:** Solicite o histórico completo ou apenas o valor mais + recente com o parâmetro `historical`. + + ### Autenticação: + + Requer token de autenticação via `token` (query) ou `Authorization` (header). + + ### Exemplo de Requisição: + + **Taxa de juros do Brasil entre dezembro/2021 e janeiro/2022:** + + ```bash + curl -X GET "https://brapi.dev/api/v2/prime-rate?country=brazil&start=01/12/2021&end=01/01/2022&sortBy=date&sortOrder=desc&token=SEU_TOKEN" + ``` + + Args: + token: **Obrigatório caso não esteja adicionado como header "Authorization".** Seu + token de autenticação pessoal da API Brapi. + + **Formas de Envio:** + + 1. **Query Parameter:** Adicione `?token=SEU_TOKEN` ao final da URL. + 2. **HTTP Header:** Inclua o header `Authorization: Bearer SEU_TOKEN` na sua + requisição. + + Ambos os métodos são aceitos, mas pelo menos um deles deve ser utilizado. + Obtenha seu token em [brapi.dev/dashboard](https://brapi.dev/dashboard). + + country: **Opcional.** O país do qual você deseja obter informações sobre a taxa básica + de juros. Por padrão, o país é definido como brazil. Você pode consultar a lista + de países disponíveis através do endpoint `/api/v2/prime-rate/available`. + + end: **Opcional.** Data final do período para busca no formato DD/MM/YYYY. Por padrão + é a data atual. Útil quando `historical=true` para restringir o período da série + histórica. + + historical: **Opcional.** Define se os dados históricos serão retornados. Se definido como + `true`, retorna a série histórica completa. Se `false` (padrão) ou omitido, + retorna apenas o valor mais recente. + + sort_by: **Opcional.** Campo pelo qual os resultados serão ordenados. Por padrão, ordena + por `date` (data). + + sort_order: **Opcional.** Define se a ordenação será crescente (`asc`) ou decrescente + (`desc`). Por padrão, é `desc` (decrescente). + + start: **Opcional.** Data inicial do período para busca no formato DD/MM/YYYY. Útil + quando `historical=true` para restringir o período da série histórica. + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + return await self._get( + "/api/v2/prime-rate", + options=make_request_options( + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + query=await async_maybe_transform( + { + "token": token, + "country": country, + "end": end, + "historical": historical, + "sort_by": sort_by, + "sort_order": sort_order, + "start": start, + }, + prime_rate_retrieve_params.PrimeRateRetrieveParams, + ), + ), + cast_to=PrimeRateRetrieveResponse, + ) + + async def list_available( + self, + *, + token: str | Omit = omit, + search: str | Omit = omit, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. 
+ extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> PrimeRateListAvailableResponse: + """ + Liste todos os países disponíveis com dados de taxa básica de juros (SELIC) na + API brapi. Este endpoint facilita a descoberta de quais países possuem dados + disponíveis para consulta através do endpoint principal `/api/v2/prime-rate`. + + ### Funcionalidades: + + - **Busca Filtrada:** Utilize o parâmetro `search` para filtrar países por nome + ou parte do nome. + - **Ideal para Autocomplete:** Perfeito para implementar campos de busca com + autocompletar em interfaces de usuário. + + ### Autenticação: + + Requer token de autenticação via `token` (query) ou `Authorization` (header). + + ### Exemplo de Requisição: + + **Listar países que contenham "BR" no nome:** + + ```bash + curl -X GET "https://brapi.dev/api/v2/prime-rate/available?search=BR&token=SEU_TOKEN" + ``` + + Args: + token: **Obrigatório caso não esteja adicionado como header "Authorization".** Seu + token de autenticação pessoal da API Brapi. + + **Formas de Envio:** + + 1. **Query Parameter:** Adicione `?token=SEU_TOKEN` ao final da URL. + 2. **HTTP Header:** Inclua o header `Authorization: Bearer SEU_TOKEN` na sua + requisição. + + Ambos os métodos são aceitos, mas pelo menos um deles deve ser utilizado. + Obtenha seu token em [brapi.dev/dashboard](https://brapi.dev/dashboard). + + search: **Opcional.** Termo para filtrar a lista de países por nome. Retorna países + cujos nomes contenham o termo especificado (case insensitive). + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + return await self._get( + "/api/v2/prime-rate/available", + options=make_request_options( + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + query=await async_maybe_transform( + { + "token": token, + "search": search, + }, + prime_rate_list_available_params.PrimeRateListAvailableParams, + ), + ), + cast_to=PrimeRateListAvailableResponse, + ) + + +class PrimeRateResourceWithRawResponse: + def __init__(self, prime_rate: PrimeRateResource) -> None: + self._prime_rate = prime_rate + + self.retrieve = to_raw_response_wrapper( + prime_rate.retrieve, + ) + self.list_available = to_raw_response_wrapper( + prime_rate.list_available, + ) + + +class AsyncPrimeRateResourceWithRawResponse: + def __init__(self, prime_rate: AsyncPrimeRateResource) -> None: + self._prime_rate = prime_rate + + self.retrieve = async_to_raw_response_wrapper( + prime_rate.retrieve, + ) + self.list_available = async_to_raw_response_wrapper( + prime_rate.list_available, + ) + + +class PrimeRateResourceWithStreamingResponse: + def __init__(self, prime_rate: PrimeRateResource) -> None: + self._prime_rate = prime_rate + + self.retrieve = to_streamed_response_wrapper( + prime_rate.retrieve, + ) + self.list_available = to_streamed_response_wrapper( + prime_rate.list_available, + ) + + +class AsyncPrimeRateResourceWithStreamingResponse: + def __init__(self, prime_rate: AsyncPrimeRateResource) -> None: + self._prime_rate = prime_rate + + self.retrieve = async_to_streamed_response_wrapper( + prime_rate.retrieve, + ) + self.list_available = async_to_streamed_response_wrapper( + prime_rate.list_available, + ) diff --git 
a/src/brapi/resources/v2/v2.py b/src/brapi/resources/v2/v2.py new file mode 100644 index 0000000..3fa73c9 --- /dev/null +++ b/src/brapi/resources/v2/v2.py @@ -0,0 +1,198 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from .crypto import ( + CryptoResource, + AsyncCryptoResource, + CryptoResourceWithRawResponse, + AsyncCryptoResourceWithRawResponse, + CryptoResourceWithStreamingResponse, + AsyncCryptoResourceWithStreamingResponse, +) +from .currency import ( + CurrencyResource, + AsyncCurrencyResource, + CurrencyResourceWithRawResponse, + AsyncCurrencyResourceWithRawResponse, + CurrencyResourceWithStreamingResponse, + AsyncCurrencyResourceWithStreamingResponse, +) +from ..._compat import cached_property +from .inflation import ( + InflationResource, + AsyncInflationResource, + InflationResourceWithRawResponse, + AsyncInflationResourceWithRawResponse, + InflationResourceWithStreamingResponse, + AsyncInflationResourceWithStreamingResponse, +) +from .prime_rate import ( + PrimeRateResource, + AsyncPrimeRateResource, + PrimeRateResourceWithRawResponse, + AsyncPrimeRateResourceWithRawResponse, + PrimeRateResourceWithStreamingResponse, + AsyncPrimeRateResourceWithStreamingResponse, +) +from ..._resource import SyncAPIResource, AsyncAPIResource + +__all__ = ["V2Resource", "AsyncV2Resource"] + + +class V2Resource(SyncAPIResource): + @cached_property + def crypto(self) -> CryptoResource: + return CryptoResource(self._client) + + @cached_property + def currency(self) -> CurrencyResource: + return CurrencyResource(self._client) + + @cached_property + def inflation(self) -> InflationResource: + return InflationResource(self._client) + + @cached_property + def prime_rate(self) -> PrimeRateResource: + return PrimeRateResource(self._client) + + @cached_property + def with_raw_response(self) -> V2ResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return + the raw response object instead of the parsed content. + + For more information, see https://www.github.com/brapi-dev/brapi-python#accessing-raw-response-data-eg-headers + """ + return V2ResourceWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> V2ResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. + + For more information, see https://www.github.com/brapi-dev/brapi-python#with_streaming_response + """ + return V2ResourceWithStreamingResponse(self) + + +class AsyncV2Resource(AsyncAPIResource): + @cached_property + def crypto(self) -> AsyncCryptoResource: + return AsyncCryptoResource(self._client) + + @cached_property + def currency(self) -> AsyncCurrencyResource: + return AsyncCurrencyResource(self._client) + + @cached_property + def inflation(self) -> AsyncInflationResource: + return AsyncInflationResource(self._client) + + @cached_property + def prime_rate(self) -> AsyncPrimeRateResource: + return AsyncPrimeRateResource(self._client) + + @cached_property + def with_raw_response(self) -> AsyncV2ResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return + the raw response object instead of the parsed content. 
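A sketch of the raw-response pattern, assuming a `Brapi` client class and that the wrapper returns an object with `.headers` and `.parse()`, as is usual for these generated SDKs:

```python
from brapi import Brapi  # assumed client class name

client = Brapi()
# Prefix the call with `.with_raw_response` to inspect the HTTP layer first.
response = client.v2.prime_rate.with_raw_response.list_available(token="SEU_TOKEN")
print(response.headers)
parsed = response.parse()  # then convert to PrimeRateListAvailableResponse
```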
+ + For more information, see https://www.github.com/brapi-dev/brapi-python#accessing-raw-response-data-eg-headers + """ + return AsyncV2ResourceWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> AsyncV2ResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. + + For more information, see https://www.github.com/brapi-dev/brapi-python#with_streaming_response + """ + return AsyncV2ResourceWithStreamingResponse(self) + + +class V2ResourceWithRawResponse: + def __init__(self, v2: V2Resource) -> None: + self._v2 = v2 + + @cached_property + def crypto(self) -> CryptoResourceWithRawResponse: + return CryptoResourceWithRawResponse(self._v2.crypto) + + @cached_property + def currency(self) -> CurrencyResourceWithRawResponse: + return CurrencyResourceWithRawResponse(self._v2.currency) + + @cached_property + def inflation(self) -> InflationResourceWithRawResponse: + return InflationResourceWithRawResponse(self._v2.inflation) + + @cached_property + def prime_rate(self) -> PrimeRateResourceWithRawResponse: + return PrimeRateResourceWithRawResponse(self._v2.prime_rate) + + +class AsyncV2ResourceWithRawResponse: + def __init__(self, v2: AsyncV2Resource) -> None: + self._v2 = v2 + + @cached_property + def crypto(self) -> AsyncCryptoResourceWithRawResponse: + return AsyncCryptoResourceWithRawResponse(self._v2.crypto) + + @cached_property + def currency(self) -> AsyncCurrencyResourceWithRawResponse: + return AsyncCurrencyResourceWithRawResponse(self._v2.currency) + + @cached_property + def inflation(self) -> AsyncInflationResourceWithRawResponse: + return AsyncInflationResourceWithRawResponse(self._v2.inflation) + + @cached_property + def prime_rate(self) -> AsyncPrimeRateResourceWithRawResponse: + return AsyncPrimeRateResourceWithRawResponse(self._v2.prime_rate) + + +class V2ResourceWithStreamingResponse: + def __init__(self, v2: V2Resource) -> None: + self._v2 = v2 + + @cached_property + def crypto(self) -> CryptoResourceWithStreamingResponse: + return CryptoResourceWithStreamingResponse(self._v2.crypto) + + @cached_property + def currency(self) -> CurrencyResourceWithStreamingResponse: + return CurrencyResourceWithStreamingResponse(self._v2.currency) + + @cached_property + def inflation(self) -> InflationResourceWithStreamingResponse: + return InflationResourceWithStreamingResponse(self._v2.inflation) + + @cached_property + def prime_rate(self) -> PrimeRateResourceWithStreamingResponse: + return PrimeRateResourceWithStreamingResponse(self._v2.prime_rate) + + +class AsyncV2ResourceWithStreamingResponse: + def __init__(self, v2: AsyncV2Resource) -> None: + self._v2 = v2 + + @cached_property + def crypto(self) -> AsyncCryptoResourceWithStreamingResponse: + return AsyncCryptoResourceWithStreamingResponse(self._v2.crypto) + + @cached_property + def currency(self) -> AsyncCurrencyResourceWithStreamingResponse: + return AsyncCurrencyResourceWithStreamingResponse(self._v2.currency) + + @cached_property + def inflation(self) -> AsyncInflationResourceWithStreamingResponse: + return AsyncInflationResourceWithStreamingResponse(self._v2.inflation) + + @cached_property + def prime_rate(self) -> AsyncPrimeRateResourceWithStreamingResponse: + return AsyncPrimeRateResourceWithStreamingResponse(self._v2.prime_rate) diff --git a/src/brapi/types/__init__.py b/src/brapi/types/__init__.py new file mode 100644 index 0000000..91dde6b --- /dev/null +++ b/src/brapi/types/__init__.py @@ -0,0 +1,16 @@ +# File generated from our OpenAPI 
spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from .cashflow_entry import CashflowEntry as CashflowEntry +from .quote_list_params import QuoteListParams as QuoteListParams +from .value_added_entry import ValueAddedEntry as ValueAddedEntry +from .balance_sheet_entry import BalanceSheetEntry as BalanceSheetEntry +from .quote_list_response import QuoteListResponse as QuoteListResponse +from .financial_data_entry import FinancialDataEntry as FinancialDataEntry +from .available_list_params import AvailableListParams as AvailableListParams +from .quote_retrieve_params import QuoteRetrieveParams as QuoteRetrieveParams +from .income_statement_entry import IncomeStatementEntry as IncomeStatementEntry +from .available_list_response import AvailableListResponse as AvailableListResponse +from .quote_retrieve_response import QuoteRetrieveResponse as QuoteRetrieveResponse +from .default_key_statistics_entry import DefaultKeyStatisticsEntry as DefaultKeyStatisticsEntry diff --git a/src/brapi/types/available_list_params.py b/src/brapi/types/available_list_params.py new file mode 100644 index 0000000..631921d --- /dev/null +++ b/src/brapi/types/available_list_params.py @@ -0,0 +1,30 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing_extensions import TypedDict + +__all__ = ["AvailableListParams"] + + +class AvailableListParams(TypedDict, total=False): + token: str + """ + **Obrigatório caso não esteja adicionado como header "Authorization".** Seu + token de autenticação pessoal da API Brapi. + + **Formas de Envio:** + + 1. **Query Parameter:** Adicione `?token=SEU_TOKEN` ao final da URL. + 2. **HTTP Header:** Inclua o header `Authorization: Bearer SEU_TOKEN` na sua + requisição. + + Ambos os métodos são aceitos, mas pelo menos um deles deve ser utilizado. + Obtenha seu token em [brapi.dev/dashboard](https://brapi.dev/dashboard). + """ + + search: str + """ + **Opcional.** Termo para filtrar a lista de tickers (correspondência parcial, + case-insensitive). Se omitido, retorna todos os tickers. + """ diff --git a/src/brapi/types/available_list_response.py b/src/brapi/types/available_list_response.py new file mode 100644 index 0000000..20f59dd --- /dev/null +++ b/src/brapi/types/available_list_response.py @@ -0,0 +1,18 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import List + +from .._models import BaseModel + +__all__ = ["AvailableListResponse"] + + +class AvailableListResponse(BaseModel): + indexes: List[str] + """Lista de tickers de **índices** disponíveis (ex: `^BVSP`, `^IFIX`).""" + + stocks: List[str] + """ + Lista de tickers de **ações, FIIs, BDRs e ETFs** disponíveis (ex: `PETR4`, + `VALE3`, `MXRF11`). + """ diff --git a/src/brapi/types/balance_sheet_entry.py b/src/brapi/types/balance_sheet_entry.py new file mode 100644 index 0000000..f40cd15 --- /dev/null +++ b/src/brapi/types/balance_sheet_entry.py @@ -0,0 +1,455 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
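A short usage sketch for the `AvailableListParams`/`AvailableListResponse` pair above. The client entry point, the constructor keyword, and `client.available.list(...)` are assumptions inferred from the resource layout in this diff, not confirmed API; as the param docstring notes, the token may equally be sent as an `Authorization: Bearer` header.

import os

from brapi import Brapi  # assumed top-level export of the generated client

# Constructor kwarg for the token is an assumption; the API itself accepts either
# `?token=SEU_TOKEN` or an `Authorization: Bearer SEU_TOKEN` header.
client = Brapi(token=os.environ["BRAPI_TOKEN"])

# `search` filters tickers by partial, case-insensitive match (AvailableListParams.search).
available = client.available.list(search="PETR")  # resource/method names assumed
print(available.stocks)   # e.g. ["PETR3", "PETR4", ...]
print(available.indexes)  # index tickers such as "^BVSP"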
+ +from typing import Optional +from datetime import date +from typing_extensions import Literal + +from pydantic import Field as FieldInfo + +from .._models import BaseModel + +__all__ = ["BalanceSheetEntry"] + + +class BalanceSheetEntry(BaseModel): + accounts_payable: Optional[float] = FieldInfo(alias="accountsPayable", default=None) + """Contas a pagar (fornecedores).""" + + accounts_receivable_from_clients: Optional[float] = FieldInfo(alias="accountsReceivableFromClients", default=None) + """Contas a receber de clientes (bruto).""" + + accumulated_profits_or_losses: Optional[float] = FieldInfo(alias="accumulatedProfitsOrLosses", default=None) + """Lucros ou prejuízos acumulados.""" + + advance_for_future_capital_increase: Optional[float] = FieldInfo( + alias="advanceForFutureCapitalIncrease", default=None + ) + """Adiantamento para futuro aumento de capital (AFAC).""" + + biological_assets: Optional[float] = FieldInfo(alias="biologicalAssets", default=None) + """Ativos biológicos.""" + + capitalization: Optional[float] = None + """Obrigações de capitalização.""" + + capital_reserves: Optional[float] = FieldInfo(alias="capitalReserves", default=None) + """Reservas de capital (sinônimo de `capitalSurplus`).""" + + capital_surplus: Optional[float] = FieldInfo(alias="capitalSurplus", default=None) + """Reservas de capital.""" + + cash: Optional[float] = None + """Caixa e equivalentes de caixa.""" + + central_bank_compulsory_deposit: Optional[float] = FieldInfo(alias="centralBankCompulsoryDeposit", default=None) + """Depósitos compulsórios no Banco Central.""" + + common_stock: Optional[float] = FieldInfo(alias="commonStock", default=None) + """Capital social realizado.""" + + complementary_pension: Optional[float] = FieldInfo(alias="complementaryPension", default=None) + """Obrigações de previdência complementar.""" + + compulsory_loans_and_deposits: Optional[float] = FieldInfo(alias="compulsoryLoansAndDeposits", default=None) + """Empréstimos e depósitos compulsórios.""" + + controller_shareholders_equity: Optional[float] = FieldInfo(alias="controllerShareholdersEquity", default=None) + """Patrimônio líquido atribuível aos controladores.""" + + credits_from_operations: Optional[float] = FieldInfo(alias="creditsFromOperations", default=None) + """Créditos oriundos de operações (instituições financeiras/seguradoras).""" + + credits_with_related_parties: Optional[float] = FieldInfo(alias="creditsWithRelatedParties", default=None) + """Créditos com partes relacionadas.""" + + cumulative_conversion_adjustments: Optional[float] = FieldInfo( + alias="cumulativeConversionAdjustments", default=None + ) + """Ajustes acumulados de conversão.""" + + current_and_deferred_taxes: Optional[float] = FieldInfo(alias="currentAndDeferredTaxes", default=None) + """Tributos correntes e diferidos no ativo.""" + + current_liabilities: Optional[float] = FieldInfo(alias="currentLiabilities", default=None) + """Total do passivo circulante (sinônimo de `totalCurrentLiabilities`).""" + + debentures: Optional[float] = None + """Debêntures (passivo circulante).""" + + debits_from_capitalization: Optional[float] = FieldInfo(alias="debitsFromCapitalization", default=None) + """Débitos de operações de capitalização.""" + + debits_from_complementary_pension: Optional[float] = FieldInfo(alias="debitsFromComplementaryPension", default=None) + """Débitos de operações de previdência complementar.""" + + debits_from_insurance_and_reinsurance: Optional[float] = FieldInfo( + alias="debitsFromInsuranceAndReinsurance", default=None 
+ ) + """Débitos de operações de seguros e resseguros.""" + + debits_from_operations: Optional[float] = FieldInfo(alias="debitsFromOperations", default=None) + """Débitos oriundos de operações.""" + + debits_from_other_operations: Optional[float] = FieldInfo(alias="debitsFromOtherOperations", default=None) + """Débitos de outras operações.""" + + deferred_long_term_asset_charges: Optional[float] = FieldInfo(alias="deferredLongTermAssetCharges", default=None) + """Encargos diferidos de ativos de longo prazo.""" + + deferred_long_term_liab: Optional[float] = FieldInfo(alias="deferredLongTermLiab", default=None) + """Passivos fiscais diferidos (longo prazo).""" + + deferred_selling_expenses: Optional[float] = FieldInfo(alias="deferredSellingExpenses", default=None) + """Despesas de comercialização diferidas.""" + + deferred_taxes: Optional[float] = FieldInfo(alias="deferredTaxes", default=None) + """Tributos diferidos no ativo.""" + + end_date: Optional[date] = FieldInfo(alias="endDate", default=None) + """Data de término do período fiscal ao qual o balanço se refere (YYYY-MM-DD).""" + + equity_valuation_adjustments: Optional[float] = FieldInfo(alias="equityValuationAdjustments", default=None) + """Ajustes de avaliação patrimonial.""" + + financial_assets: Optional[float] = FieldInfo(alias="financialAssets", default=None) + """Ativos financeiros (agregado de instrumentos financeiros no ativo).""" + + financial_assets_at_amortized_cost: Optional[float] = FieldInfo( + alias="financialAssetsAtAmortizedCost", default=None + ) + """Ativos financeiros ao custo amortizado.""" + + financial_assets_measured_at_fair_value_through_other_comprehensive_income: Optional[float] = FieldInfo( + alias="financialAssetsMeasuredAtFairValueThroughOtherComprehensiveIncome", default=None + ) + """ + Ativos financeiros mensurados a valor justo por outros resultados abrangentes + (FVOCI). + """ + + financial_assets_measured_at_fair_value_through_profit_or_loss: Optional[float] = FieldInfo( + alias="financialAssetsMeasuredAtFairValueThroughProfitOrLoss", default=None + ) + """Ativos financeiros mensurados a valor justo por meio do resultado (FVTPL).""" + + financial_investments_measured_at_amortized_cost: Optional[float] = FieldInfo( + alias="financialInvestmentsMeasuredAtAmortizedCost", default=None + ) + """Investimentos financeiros mensurados ao custo amortizado.""" + + financial_investments_measured_at_fair_value_through_other_comprehensive_income: Optional[float] = FieldInfo( + alias="financialInvestmentsMeasuredAtFairValueThroughOtherComprehensiveIncome", default=None + ) + """ + Investimentos financeiros mensurados a valor justo por outros resultados + abrangentes. 
+ """ + + financial_liabilities_at_amortized_cost: Optional[float] = FieldInfo( + alias="financialLiabilitiesAtAmortizedCost", default=None + ) + """Passivos financeiros ao custo amortizado.""" + + financial_liabilities_measured_at_fair_value_through_income: Optional[float] = FieldInfo( + alias="financialLiabilitiesMeasuredAtFairValueThroughIncome", default=None + ) + """Passivos financeiros mensurados a valor justo por meio do resultado.""" + + foreign_suppliers: Optional[float] = FieldInfo(alias="foreignSuppliers", default=None) + """Fornecedores estrangeiros.""" + + good_will: Optional[float] = FieldInfo(alias="goodWill", default=None) + """Ágio por expectativa de rentabilidade futura (Goodwill).""" + + insurance_and_reinsurance: Optional[float] = FieldInfo(alias="insuranceAndReinsurance", default=None) + """Provisões/obrigações de seguros e resseguros.""" + + intangible_asset: Optional[float] = FieldInfo(alias="intangibleAsset", default=None) + """Ativo intangível (valor agregado).""" + + intangible_assets: Optional[float] = FieldInfo(alias="intangibleAssets", default=None) + """Ativos intangíveis (marcas, patentes, etc.).""" + + inventory: Optional[float] = None + """Estoques.""" + + investment_properties: Optional[float] = FieldInfo(alias="investmentProperties", default=None) + """Propriedades para investimento.""" + + investments: Optional[float] = None + """Investimentos (participações e outros).""" + + lease_financing: Optional[float] = FieldInfo(alias="leaseFinancing", default=None) + """Financiamento por arrendamento mercantil (circulante).""" + + loans_and_financing: Optional[float] = FieldInfo(alias="loansAndFinancing", default=None) + """Empréstimos e financiamentos (circulante).""" + + loans_and_financing_in_foreign_currency: Optional[float] = FieldInfo( + alias="loansAndFinancingInForeignCurrency", default=None + ) + """Empréstimos e financiamentos em moeda estrangeira (circulante).""" + + loans_and_financing_in_national_currency: Optional[float] = FieldInfo( + alias="loansAndFinancingInNationalCurrency", default=None + ) + """Empréstimos e financiamentos em moeda nacional (circulante).""" + + long_term_accounts_payable: Optional[float] = FieldInfo(alias="longTermAccountsPayable", default=None) + """Fornecedores/contas a pagar de longo prazo.""" + + long_term_accounts_receivable_from_clients: Optional[float] = FieldInfo( + alias="longTermAccountsReceivableFromClients", default=None + ) + """Contas a receber de clientes - longo prazo.""" + + long_term_assets: Optional[float] = FieldInfo(alias="longTermAssets", default=None) + """Total do ativo não circulante (agregado).""" + + long_term_biological_assets: Optional[float] = FieldInfo(alias="longTermBiologicalAssets", default=None) + """Ativos biológicos de longo prazo.""" + + long_term_capitalization: Optional[float] = FieldInfo(alias="longTermCapitalization", default=None) + """Obrigações de capitalização de longo prazo.""" + + long_term_complementary_pension: Optional[float] = FieldInfo(alias="longTermComplementaryPension", default=None) + """Obrigações de previdência complementar de longo prazo.""" + + long_term_debentures: Optional[float] = FieldInfo(alias="longTermDebentures", default=None) + """Debêntures (passivo não circulante).""" + + long_term_debits_from_operations: Optional[float] = FieldInfo(alias="longTermDebitsFromOperations", default=None) + """Débitos de operações (longo prazo).""" + + long_term_debt: Optional[float] = FieldInfo(alias="longTermDebt", default=None) + """Dívida de longo prazo (empréstimos e 
financiamentos não circulantes).""" + + long_term_deferred_taxes: Optional[float] = FieldInfo(alias="longTermDeferredTaxes", default=None) + """Tributos diferidos (Ativo Não Circulante).""" + + long_term_financial_investments_measured_at_fair_value_through_income: Optional[float] = FieldInfo( + alias="longTermFinancialInvestmentsMeasuredAtFairValueThroughIncome", default=None + ) + """ + Investimentos financeiros de longo prazo mensurados a valor justo por meio do + resultado. + """ + + long_term_insurance_and_reinsurance: Optional[float] = FieldInfo( + alias="longTermInsuranceAndReinsurance", default=None + ) + """Obrigações de seguros e resseguros de longo prazo.""" + + long_term_inventory: Optional[float] = FieldInfo(alias="longTermInventory", default=None) + """Estoques de longo prazo.""" + + long_term_investments: Optional[float] = FieldInfo(alias="longTermInvestments", default=None) + """Investimentos de longo prazo.""" + + long_term_lease_financing: Optional[float] = FieldInfo(alias="longTermLeaseFinancing", default=None) + """Financiamento por arrendamento mercantil (não circulante).""" + + long_term_liabilities: Optional[float] = FieldInfo(alias="longTermLiabilities", default=None) + """Total do passivo de longo prazo.""" + + long_term_loans_and_financing: Optional[float] = FieldInfo(alias="longTermLoansAndFinancing", default=None) + """Empréstimos e financiamentos (não circulante).""" + + long_term_loans_and_financing_in_foreign_currency: Optional[float] = FieldInfo( + alias="longTermLoansAndFinancingInForeignCurrency", default=None + ) + """Empréstimos e financiamentos em moeda estrangeira (não circulante).""" + + long_term_loans_and_financing_in_national_currency: Optional[float] = FieldInfo( + alias="longTermLoansAndFinancingInNationalCurrency", default=None + ) + """Empréstimos e financiamentos em moeda nacional (não circulante).""" + + long_term_prepaid_expenses: Optional[float] = FieldInfo(alias="longTermPrepaidExpenses", default=None) + """Despesas antecipadas de longo prazo.""" + + long_term_provisions: Optional[float] = FieldInfo(alias="longTermProvisions", default=None) + """Provisões (passivo não circulante).""" + + long_term_realizable_assets: Optional[float] = FieldInfo(alias="longTermRealizableAssets", default=None) + """Ativo realizável a longo prazo.""" + + long_term_receivables: Optional[float] = FieldInfo(alias="longTermReceivables", default=None) + """Contas a receber de longo prazo.""" + + long_term_technical_provisions: Optional[float] = FieldInfo(alias="longTermTechnicalProvisions", default=None) + """Provisões técnicas de longo prazo.""" + + minority_interest: Optional[float] = FieldInfo(alias="minorityInterest", default=None) + """Participação de não controladores (no patrimônio líquido).""" + + national_suppliers: Optional[float] = FieldInfo(alias="nationalSuppliers", default=None) + """Fornecedores nacionais.""" + + net_receivables: Optional[float] = FieldInfo(alias="netReceivables", default=None) + """Contas a receber líquidas (clientes).""" + + net_tangible_assets: Optional[float] = FieldInfo(alias="netTangibleAssets", default=None) + """Ativos tangíveis líquidos (Ativo Total - Intangíveis - Passivo Total).""" + + non_controlling_shareholders_equity: Optional[float] = FieldInfo( + alias="nonControllingShareholdersEquity", default=None + ) + """Participação dos não controladores no patrimônio líquido.""" + + non_current_assets: Optional[float] = FieldInfo(alias="nonCurrentAssets", default=None) + """Total do ativo não circulante (sinônimo de 
`longTermAssets`).""" + + non_current_liabilities: Optional[float] = FieldInfo(alias="nonCurrentLiabilities", default=None) + """Total do passivo não circulante.""" + + other_accounts_receivable: Optional[float] = FieldInfo(alias="otherAccountsReceivable", default=None) + """Outras contas a receber.""" + + other_assets: Optional[float] = FieldInfo(alias="otherAssets", default=None) + """Outros ativos não circulantes.""" + + other_comprehensive_results: Optional[float] = FieldInfo(alias="otherComprehensiveResults", default=None) + """Outros resultados abrangentes.""" + + other_current_assets: Optional[float] = FieldInfo(alias="otherCurrentAssets", default=None) + """Outros ativos circulantes.""" + + other_current_liab: Optional[float] = FieldInfo(alias="otherCurrentLiab", default=None) + """Outros passivos circulantes.""" + + other_current_liabilities: Optional[float] = FieldInfo(alias="otherCurrentLiabilities", default=None) + """Outros passivos circulantes (sinônimo de `otherCurrentLiab`).""" + + other_debits: Optional[float] = FieldInfo(alias="otherDebits", default=None) + """Outros débitos.""" + + other_liab: Optional[float] = FieldInfo(alias="otherLiab", default=None) + """Outros passivos não circulantes.""" + + other_liabilities: Optional[float] = FieldInfo(alias="otherLiabilities", default=None) + """Outros passivos.""" + + other_long_term_obligations: Optional[float] = FieldInfo(alias="otherLongTermObligations", default=None) + """Outras obrigações (passivo não circulante).""" + + other_long_term_provisions: Optional[float] = FieldInfo(alias="otherLongTermProvisions", default=None) + """Outras provisões de longo prazo.""" + + other_long_term_receivables: Optional[float] = FieldInfo(alias="otherLongTermReceivables", default=None) + """Outros créditos/recebíveis de longo prazo.""" + + other_non_current_assets: Optional[float] = FieldInfo(alias="otherNonCurrentAssets", default=None) + """Outros ativos não circulantes (detalhamento).""" + + other_non_current_liabilities: Optional[float] = FieldInfo(alias="otherNonCurrentLiabilities", default=None) + """Outros passivos não circulantes.""" + + other_obligations: Optional[float] = FieldInfo(alias="otherObligations", default=None) + """Outras obrigações (circulante).""" + + other_operations: Optional[float] = FieldInfo(alias="otherOperations", default=None) + """Outras contas operacionais no ativo.""" + + other_provisions: Optional[float] = FieldInfo(alias="otherProvisions", default=None) + """Outras provisões (diversas).""" + + other_stockholder_equity: Optional[float] = FieldInfo(alias="otherStockholderEquity", default=None) + """Outros componentes do patrimônio líquido.""" + + other_values_and_assets: Optional[float] = FieldInfo(alias="otherValuesAndAssets", default=None) + """Outros valores e bens.""" + + prepaid_expenses: Optional[float] = FieldInfo(alias="prepaidExpenses", default=None) + """Despesas antecipadas.""" + + profit_reserves: Optional[float] = FieldInfo(alias="profitReserves", default=None) + """Reservas de lucros.""" + + profits_and_revenues_to_be_appropriated: Optional[float] = FieldInfo( + alias="profitsAndRevenuesToBeAppropriated", default=None + ) + """Lucros e receitas a apropriar.""" + + property_plant_equipment: Optional[float] = FieldInfo(alias="propertyPlantEquipment", default=None) + """Imobilizado (propriedades, instalações e equipamentos).""" + + providers: Optional[float] = None + """Fornecedores (sinônimo de `accountsPayable`).""" + + provisions: Optional[float] = None + """Provisões (passivo).""" + + 
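    # Note (descriptive, not generated output): several of these fields are documented
    # as synonyms of one another (e.g. `providers` and `accountsPayable`,
    # `realizedShareCapital` and `commonStock`, `shareholdersEquity` and
    # `totalStockholderEquity`); which variant is populated appears to depend on the
    # statement layout returned by the API, and any line item the company does not
    # report simply stays None.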
realized_share_capital: Optional[float] = FieldInfo(alias="realizedShareCapital", default=None) + """Capital social realizado (sinônimo de `commonStock`).""" + + retained_earnings: Optional[float] = FieldInfo(alias="retainedEarnings", default=None) + """Lucros/Prejuízos acumulados.""" + + revaluation_reserves: Optional[float] = FieldInfo(alias="revaluationReserves", default=None) + """Reservas de reavaliação.""" + + securities_and_credits_receivable: Optional[float] = FieldInfo(alias="securitiesAndCreditsReceivable", default=None) + """Títulos e créditos a receber.""" + + shareholders_equity: Optional[float] = FieldInfo(alias="shareholdersEquity", default=None) + """Patrimônio líquido (sinônimo de `totalStockholderEquity`).""" + + shareholdings: Optional[float] = None + """Participações societárias.""" + + short_long_term_debt: Optional[float] = FieldInfo(alias="shortLongTermDebt", default=None) + """Dívida de curto prazo (empréstimos e financiamentos circulantes).""" + + short_term_investments: Optional[float] = FieldInfo(alias="shortTermInvestments", default=None) + """Aplicações financeiras de curto prazo.""" + + social_and_labor_obligations: Optional[float] = FieldInfo(alias="socialAndLaborObligations", default=None) + """Obrigações sociais e trabalhistas.""" + + symbol: Optional[str] = None + """Ticker do ativo ao qual o balanço se refere.""" + + taxes_to_recover: Optional[float] = FieldInfo(alias="taxesToRecover", default=None) + """Impostos a recuperar.""" + + tax_liabilities: Optional[float] = FieldInfo(alias="taxLiabilities", default=None) + """Obrigações fiscais (passivo).""" + + tax_obligations: Optional[float] = FieldInfo(alias="taxObligations", default=None) + """Obrigações fiscais (passivo circulante).""" + + technical_provisions: Optional[float] = FieldInfo(alias="technicalProvisions", default=None) + """Provisões técnicas (seguradoras/previdência).""" + + third_party_deposits: Optional[float] = FieldInfo(alias="thirdPartyDeposits", default=None) + """Depósitos de terceiros.""" + + total_assets: Optional[float] = FieldInfo(alias="totalAssets", default=None) + """Total do ativo.""" + + total_current_assets: Optional[float] = FieldInfo(alias="totalCurrentAssets", default=None) + """Total do ativo circulante.""" + + total_current_liabilities: Optional[float] = FieldInfo(alias="totalCurrentLiabilities", default=None) + """Total do passivo circulante.""" + + total_liab: Optional[float] = FieldInfo(alias="totalLiab", default=None) + """Total do passivo (circulante + não circulante).""" + + total_liabilities: Optional[float] = FieldInfo(alias="totalLiabilities", default=None) + """Total do passivo.""" + + total_stockholder_equity: Optional[float] = FieldInfo(alias="totalStockholderEquity", default=None) + """Total do patrimônio líquido.""" + + treasury_stock: Optional[float] = FieldInfo(alias="treasuryStock", default=None) + """Ações em tesouraria.""" + + type: Optional[Literal["yearly", "quarterly"]] = None + """ + Indica a periodicidade do balanço: `yearly` (anual) ou `quarterly` (trimestral). + """ + + updated_at: Optional[date] = FieldInfo(alias="updatedAt", default=None) + """Data da última atualização deste registro (YYYY-MM-DD).""" diff --git a/src/brapi/types/cashflow_entry.py b/src/brapi/types/cashflow_entry.py new file mode 100644 index 0000000..b8b7fb9 --- /dev/null +++ b/src/brapi/types/cashflow_entry.py @@ -0,0 +1,89 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
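Because every field above maps a camelCase API key to a snake_case attribute via `FieldInfo(alias=...)`, a raw balance-sheet payload can be loaded straight into the model. A minimal sketch, assuming the SDK's `BaseModel` behaves like a standard Pydantic v2 model here (values are illustrative, not real figures):

from brapi.types import BalanceSheetEntry

payload = {
    # camelCase keys exactly as the API documents them; the numbers are made up.
    "symbol": "PETR4",
    "endDate": "2023-12-31",
    "totalAssets": 1_000_000_000.0,
    "totalCurrentAssets": 250_000_000.0,
    "totalStockholderEquity": 400_000_000.0,
    "type": "yearly",
}

entry = BalanceSheetEntry.model_validate(payload)          # aliases resolve the camelCase keys
print(entry.total_assets, entry.total_stockholder_equity)  # snake_case attribute access
print(entry.end_date.isoformat())                          # parsed into a datetime.date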
+ +from typing import Optional +from datetime import date +from typing_extensions import Literal + +from pydantic import Field as FieldInfo + +from .._models import BaseModel + +__all__ = ["CashflowEntry"] + + +class CashflowEntry(BaseModel): + adjustments_to_profit_or_loss: Optional[float] = FieldInfo(alias="adjustmentsToProfitOrLoss", default=None) + """ + Ajustes ao lucro/prejuízo (depreciação, amortização, equivalência patrimonial, + variações não caixa). + """ + + cash_generated_in_operations: Optional[float] = FieldInfo(alias="cashGeneratedInOperations", default=None) + """Caixa gerado nas operações (após variações no capital de giro).""" + + changes_in_assets_and_liabilities: Optional[float] = FieldInfo(alias="changesInAssetsAndLiabilities", default=None) + """ + Variações em Ativos e Passivos Operacionais (Clientes, Estoques, Fornecedores, + etc.). + """ + + end_date: Optional[date] = FieldInfo(alias="endDate", default=None) + """Data de término do período fiscal ao qual a DFC se refere (YYYY-MM-DD).""" + + exchange_variation_without_cash: Optional[float] = FieldInfo(alias="exchangeVariationWithoutCash", default=None) + """Variação cambial sem efeito caixa (ajuste de conversão).""" + + final_cash_balance: Optional[float] = FieldInfo(alias="finalCashBalance", default=None) + """Saldo Final de Caixa e Equivalentes no final do período.""" + + financing_cash_flow: Optional[float] = FieldInfo(alias="financingCashFlow", default=None) + """ + Fluxo de Caixa das Atividades de Financiamento (FCF) (Captação/Pagamento de + Empréstimos, Emissão/Recompra de Ações, Dividendos pagos). + """ + + foreign_exchange_rate_without_cash: Optional[float] = FieldInfo( + alias="foreignExchangeRateWithoutCash", default=None + ) + """Efeito da Variação Cambial sobre o Caixa e Equivalentes.""" + + income_from_operations: Optional[float] = FieldInfo(alias="incomeFromOperations", default=None) + """Caixa Gerado nas Operações (antes das variações de ativos/passivos).""" + + increase_or_decrease_in_cash: Optional[float] = FieldInfo(alias="increaseOrDecreaseInCash", default=None) + """ + Aumento ou Redução Líquida de Caixa e Equivalentes (FCO + FCI + FCF + Variação + Cambial). + """ + + initial_cash_balance: Optional[float] = FieldInfo(alias="initialCashBalance", default=None) + """Saldo Inicial de Caixa e Equivalentes no início do período.""" + + investment_cash_flow: Optional[float] = FieldInfo(alias="investmentCashFlow", default=None) + """ + Fluxo de Caixa das Atividades de Investimento (FCI) (Compra/Venda de + Imobilizado, Investimentos). + """ + + net_income_before_taxes: Optional[float] = FieldInfo(alias="netIncomeBeforeTaxes", default=None) + """ + Lucro líquido antes dos impostos (base para reconciliação pelo método indireto). + """ + + operating_cash_flow: Optional[float] = FieldInfo(alias="operatingCashFlow", default=None) + """Fluxo de Caixa das Atividades Operacionais (FCO).""" + + other_operating_activities: Optional[float] = FieldInfo(alias="otherOperatingActivities", default=None) + """Outras Atividades Operacionais (Juros pagos/recebidos, Impostos pagos, etc.).""" + + symbol: Optional[str] = None + """Ticker do ativo ao qual a DFC se refere.""" + + type: Optional[Literal["yearly", "quarterly"]] = None + """Indica a periodicidade da DFC: `yearly` (anual) ou `quarterly` (trimestral).""" + + updated_at: Optional[date] = FieldInfo(alias="updatedAt", default=None) + """ + Data da última atualização deste registro específico na fonte de dados + (YYYY-MM-DD). 
+ """ diff --git a/src/brapi/types/default_key_statistics_entry.py b/src/brapi/types/default_key_statistics_entry.py new file mode 100644 index 0000000..dc1315b --- /dev/null +++ b/src/brapi/types/default_key_statistics_entry.py @@ -0,0 +1,138 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import Optional +from datetime import date +from typing_extensions import Literal + +from pydantic import Field as FieldInfo + +from .._models import BaseModel + +__all__ = ["DefaultKeyStatisticsEntry"] + + +class DefaultKeyStatisticsEntry(BaseModel): + api_52_week_change: Optional[float] = FieldInfo(alias="52WeekChange", default=None) + """Variação percentual do preço da ação nas últimas 52 semanas.""" + + beta: Optional[float] = None + """Beta da ação (sensibilidade em relação ao mercado).""" + + book_value: Optional[float] = FieldInfo(alias="bookValue", default=None) + """Valor Patrimonial por Ação (VPA): Patrimônio Líquido / Ações em Circulação.""" + + dividend_yield: Optional[float] = FieldInfo(alias="dividendYield", default=None) + """Dividend Yield (provento anualizado sobre o preço atual).""" + + earnings_annual_growth: Optional[float] = FieldInfo(alias="earningsAnnualGrowth", default=None) + """ + Crescimento percentual do lucro líquido no último ano fiscal completo em relação + ao ano anterior. + """ + + earnings_quarterly_growth: Optional[float] = FieldInfo(alias="earningsQuarterlyGrowth", default=None) + """ + Crescimento percentual do lucro líquido no último trimestre em relação ao mesmo + trimestre do ano anterior (YoY). + """ + + enterprise_to_ebitda: Optional[float] = FieldInfo(alias="enterpriseToEbitda", default=None) + """Múltiplo EV/EBITDA (Enterprise Value / EBITDA TTM).""" + + enterprise_to_revenue: Optional[float] = FieldInfo(alias="enterpriseToRevenue", default=None) + """Múltiplo EV/Receita (Enterprise Value / Receita Líquida TTM).""" + + enterprise_value: Optional[float] = FieldInfo(alias="enterpriseValue", default=None) + """Valor da Firma (Enterprise Value - EV): Market Cap + Dívida Total - Caixa.""" + + float_shares: Optional[float] = FieldInfo(alias="floatShares", default=None) + """Ações em livre circulação (free float).""" + + forward_eps: Optional[float] = FieldInfo(alias="forwardEps", default=None) + """Lucro Por Ação projetado (próximo período).""" + + forward_pe: Optional[float] = FieldInfo(alias="forwardPE", default=None) + """ + Preço / Lucro Projetado (Forward P/E): Preço da Ação / LPA estimado para o + próximo período. + """ + + held_percent_insiders: Optional[float] = FieldInfo(alias="heldPercentInsiders", default=None) + """Percentual de ações detidas por insiders (administradores, controladores).""" + + held_percent_institutions: Optional[float] = FieldInfo(alias="heldPercentInstitutions", default=None) + """ + Percentual de ações detidas por instituições (fundos, investidores + institucionais). 
+ """ + + implied_shares_outstanding: Optional[float] = FieldInfo(alias="impliedSharesOutstanding", default=None) + """Ações implícitas em circulação (considerando diluição/derivativos).""" + + last_dividend_date: Optional[date] = FieldInfo(alias="lastDividendDate", default=None) + """Data de pagamento (ou 'Data Com') do último dividendo/JCP (YYYY-MM-DD).""" + + last_dividend_value: Optional[float] = FieldInfo(alias="lastDividendValue", default=None) + """Valor do último dividendo ou JCP pago por ação.""" + + last_fiscal_year_end: Optional[date] = FieldInfo(alias="lastFiscalYearEnd", default=None) + """Data de encerramento do último ano fiscal (YYYY-MM-DD).""" + + last_split_date: Optional[float] = FieldInfo(alias="lastSplitDate", default=None) + """Data do último desdobramento/grupamento (timestamp UNIX em segundos).""" + + last_split_factor: Optional[str] = FieldInfo(alias="lastSplitFactor", default=None) + """Fator do último desdobramento/grupamento (ex.: 2:1, 1:10).""" + + most_recent_quarter: Optional[date] = FieldInfo(alias="mostRecentQuarter", default=None) + """ + Data de término do trimestre mais recente considerado nos cálculos (YYYY-MM-DD). + """ + + net_income_to_common: Optional[float] = FieldInfo(alias="netIncomeToCommon", default=None) + """Lucro Líquido atribuível aos acionistas ordinários (controladores).""" + + next_fiscal_year_end: Optional[date] = FieldInfo(alias="nextFiscalYearEnd", default=None) + """Data de encerramento do próximo ano fiscal (YYYY-MM-DD).""" + + peg_ratio: Optional[float] = FieldInfo(alias="pegRatio", default=None) + """Índice PEG (P/E dividido pelo crescimento esperado dos lucros).""" + + price_to_book: Optional[float] = FieldInfo(alias="priceToBook", default=None) + """Preço sobre Valor Patrimonial (P/VP): Preço da Ação / VPA.""" + + profit_margins: Optional[float] = FieldInfo(alias="profitMargins", default=None) + """Margem de Lucro Líquida (Lucro Líquido / Receita Líquida). + + Geralmente em base TTM ou anual. + """ + + sand_p52_week_change: Optional[float] = FieldInfo(alias="SandP52WeekChange", default=None) + """Variação percentual do índice S&P 500 nas últimas 52 semanas (para referência).""" + + shares_outstanding: Optional[float] = FieldInfo(alias="sharesOutstanding", default=None) + """Número total de ações ordinárias em circulação.""" + + symbol: Optional[str] = None + """Ticker do ativo ao qual as estatísticas se referem.""" + + total_assets: Optional[float] = FieldInfo(alias="totalAssets", default=None) + """Valor total dos ativos registrado no último balanço (anual ou trimestral).""" + + trailing_eps: Optional[float] = FieldInfo(alias="trailingEps", default=None) + """Lucro Por Ação (LPA) dos Últimos 12 Meses (TTM).""" + + type: Optional[Literal["yearly", "quarterly", "ttm"]] = None + """ + Periodicidade dos dados: `yearly` (anual), `quarterly` (trimestral), `ttm` + (Trailing Twelve Months - últimos 12 meses). + """ + + updated_at: Optional[date] = FieldInfo(alias="updatedAt", default=None) + """ + Data da última atualização deste registro específico na fonte de dados + (YYYY-MM-DD). + """ + + ytd_return: Optional[float] = FieldInfo(alias="ytdReturn", default=None) + """Retorno percentual do preço da ação desde o início do ano atual (Year-to-Date).""" diff --git a/src/brapi/types/financial_data_entry.py b/src/brapi/types/financial_data_entry.py new file mode 100644 index 0000000..3db991a --- /dev/null +++ b/src/brapi/types/financial_data_entry.py @@ -0,0 +1,133 @@ +# File generated from our OpenAPI spec by Stainless. 
See CONTRIBUTING.md for details. + +from typing import Optional +from datetime import date +from typing_extensions import Literal + +from pydantic import Field as FieldInfo + +from .._models import BaseModel + +__all__ = ["FinancialDataEntry"] + + +class FinancialDataEntry(BaseModel): + current_price: Optional[float] = FieldInfo(alias="currentPrice", default=None) + """Preço atual da ação (pode ser ligeiramente defasado).""" + + current_ratio: Optional[float] = FieldInfo(alias="currentRatio", default=None) + """Índice de Liquidez Corrente (Ativo Circulante / Passivo Circulante).""" + + debt_to_equity: Optional[float] = FieldInfo(alias="debtToEquity", default=None) + """Índice Dívida Líquida / Patrimônio Líquido.""" + + earnings_growth: Optional[float] = FieldInfo(alias="earningsGrowth", default=None) + """ + Crescimento do Lucro Líquido (geralmente trimestral YoY, como + `earningsQuarterlyGrowth`). + """ + + ebitda: Optional[float] = None + """Lucro Antes de Juros, Impostos, Depreciação e Amortização (LAJIDA ou EBITDA). + + Geralmente TTM. + """ + + ebitda_margins: Optional[float] = FieldInfo(alias="ebitdaMargins", default=None) + """Margem EBITDA (EBITDA TTM / Receita Líquida TTM).""" + + financial_currency: Optional[str] = FieldInfo(alias="financialCurrency", default=None) + """Moeda na qual os dados financeiros são reportados (ex: `BRL`, `USD`).""" + + free_cashflow: Optional[float] = FieldInfo(alias="freeCashflow", default=None) + """Fluxo de Caixa Livre (FCO - CAPEX) - (geralmente TTM).""" + + gross_margins: Optional[float] = FieldInfo(alias="grossMargins", default=None) + """Margem Bruta (Lucro Bruto TTM / Receita Líquida TTM).""" + + gross_profits: Optional[float] = FieldInfo(alias="grossProfits", default=None) + """Lucro Bruto (geralmente TTM).""" + + number_of_analyst_opinions: Optional[float] = FieldInfo(alias="numberOfAnalystOpinions", default=None) + """Número de opiniões de analistas consideradas.""" + + operating_cashflow: Optional[float] = FieldInfo(alias="operatingCashflow", default=None) + """Fluxo de Caixa das Operações (FCO) - (geralmente TTM).""" + + operating_margins: Optional[float] = FieldInfo(alias="operatingMargins", default=None) + """Margem Operacional (EBIT TTM / Receita Líquida TTM).""" + + profit_margins: Optional[float] = FieldInfo(alias="profitMargins", default=None) + """Margem Líquida (Lucro Líquido TTM / Receita Líquida TTM). + + Sinônimo do campo de mesmo nome em `DefaultKeyStatisticsEntry`. + """ + + quick_ratio: Optional[float] = FieldInfo(alias="quickRatio", default=None) + """Índice de Liquidez Seca ((Ativo Circulante - Estoques) / Passivo Circulante).""" + + recommendation_key: Optional[str] = FieldInfo(alias="recommendationKey", default=None) + """Resumo da recomendação (ex.: strong_buy, buy, hold, sell, strong_sell).""" + + recommendation_mean: Optional[float] = FieldInfo(alias="recommendationMean", default=None) + """Média de recomendações dos analistas (1=Compra Forte, 5=Venda Forte).""" + + return_on_assets: Optional[float] = FieldInfo(alias="returnOnAssets", default=None) + """Retorno sobre Ativos (ROA): Lucro Líquido TTM / Ativo Total Médio.""" + + return_on_equity: Optional[float] = FieldInfo(alias="returnOnEquity", default=None) + """ + Retorno sobre Patrimônio Líquido (ROE): Lucro Líquido TTM / Patrimônio Líquido + Médio. 
+ """ + + revenue_growth: Optional[float] = FieldInfo(alias="revenueGrowth", default=None) + """Crescimento da Receita Líquida (geralmente trimestral YoY).""" + + revenue_per_share: Optional[float] = FieldInfo(alias="revenuePerShare", default=None) + """Receita Líquida por Ação (Receita Líquida TTM / Ações em Circulação).""" + + symbol: Optional[str] = None + """Ticker do ativo ao qual os dados se referem.""" + + target_high_price: Optional[float] = FieldInfo(alias="targetHighPrice", default=None) + """Preço-alvo mais alto estimado por analistas.""" + + target_low_price: Optional[float] = FieldInfo(alias="targetLowPrice", default=None) + """Preço-alvo mais baixo estimado por analistas.""" + + target_mean_price: Optional[float] = FieldInfo(alias="targetMeanPrice", default=None) + """Preço-alvo médio estimado por analistas.""" + + target_median_price: Optional[float] = FieldInfo(alias="targetMedianPrice", default=None) + """Preço-alvo mediano estimado por analistas.""" + + total_cash: Optional[float] = FieldInfo(alias="totalCash", default=None) + """ + Caixa e Equivalentes de Caixa + Aplicações Financeiras de Curto Prazo (último + balanço). + """ + + total_cash_per_share: Optional[float] = FieldInfo(alias="totalCashPerShare", default=None) + """Caixa Total por Ação (Caixa Total / Ações em Circulação).""" + + total_debt: Optional[float] = FieldInfo(alias="totalDebt", default=None) + """ + Dívida Bruta Total (Dívida de Curto Prazo + Dívida de Longo Prazo - último + balanço). + """ + + total_revenue: Optional[float] = FieldInfo(alias="totalRevenue", default=None) + """Receita Líquida Total (geralmente TTM).""" + + type: Optional[Literal["yearly", "quarterly", "ttm"]] = None + """ + Periodicidade dos dados: `yearly` (anual), `quarterly` (trimestral), `ttm` + (Trailing Twelve Months). + """ + + updated_at: Optional[date] = FieldInfo(alias="updatedAt", default=None) + """ + Data da última atualização deste registro específico na fonte de dados + (YYYY-MM-DD). + """ diff --git a/src/brapi/types/income_statement_entry.py b/src/brapi/types/income_statement_entry.py new file mode 100644 index 0000000..7429e0a --- /dev/null +++ b/src/brapi/types/income_statement_entry.py @@ -0,0 +1,198 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from typing import Optional +from datetime import date +from typing_extensions import Literal + +from pydantic import Field as FieldInfo + +from .._models import BaseModel + +__all__ = ["IncomeStatementEntry"] + + +class IncomeStatementEntry(BaseModel): + id: Optional[str] = None + """Identificador único deste registro de DRE (interno).""" + + administrative_costs: Optional[float] = FieldInfo(alias="administrativeCosts", default=None) + """Despesas Administrativas (detalhamento, pode estar contido em SG&A).""" + + basic_earnings_per_common_share: Optional[float] = FieldInfo(alias="basicEarningsPerCommonShare", default=None) + """Lucro Básico por Ação Ordinária (ON).""" + + basic_earnings_per_preferred_share: Optional[float] = FieldInfo( + alias="basicEarningsPerPreferredShare", default=None + ) + """Lucro Básico por Ação Preferencial (PN).""" + + basic_earnings_per_share: Optional[float] = FieldInfo(alias="basicEarningsPerShare", default=None) + """Lucro Básico por Ação (LPA Básico) - Geral.""" + + capitalization_operations: Optional[float] = FieldInfo(alias="capitalizationOperations", default=None) + """Resultado de Operações de Capitalização (específico para Seguradoras).""" + + claims_and_operations_costs: Optional[float] = FieldInfo(alias="claimsAndOperationsCosts", default=None) + """Custos com Sinistros e Operações (específico para Seguradoras).""" + + complementary_pension_operations: Optional[float] = FieldInfo(alias="complementaryPensionOperations", default=None) + """ + Resultado de Operações de Previdência Complementar (específico para + Seguradoras/Previdência). + """ + + cost_of_revenue: Optional[float] = FieldInfo(alias="costOfRevenue", default=None) + """Custo dos Produtos Vendidos (CPV) ou Custo dos Serviços Prestados (CSP).""" + + current_taxes: Optional[float] = FieldInfo(alias="currentTaxes", default=None) + """Imposto de Renda e Contribuição Social Correntes.""" + + deferred_taxes: Optional[float] = FieldInfo(alias="deferredTaxes", default=None) + """Imposto de Renda e Contribuição Social Diferidos.""" + + diluted_earnings_per_common_share: Optional[float] = FieldInfo(alias="dilutedEarningsPerCommonShare", default=None) + """Lucro Diluído por Ação Ordinária (ON).""" + + diluted_earnings_per_preferred_share: Optional[float] = FieldInfo( + alias="dilutedEarningsPerPreferredShare", default=None + ) + """Lucro Diluído por Ação Preferencial (PN).""" + + diluted_earnings_per_share: Optional[float] = FieldInfo(alias="dilutedEarningsPerShare", default=None) + """Lucro Diluído por Ação (LPA Diluído) - Geral.""" + + discontinued_operations: Optional[float] = FieldInfo(alias="discontinuedOperations", default=None) + """Resultado Líquido das Operações Descontinuadas.""" + + earnings_per_share: Optional[float] = FieldInfo(alias="earningsPerShare", default=None) + """Lucro por Ação (LPA) - Geral (pode ser básico ou diluído, verificar contexto).""" + + ebit: Optional[float] = None + """Lucro Antes dos Juros e Impostos (LAJIR ou EBIT). + + Geralmente igual a `operatingIncome`. 
+ """ + + effect_of_accounting_charges: Optional[float] = FieldInfo(alias="effectOfAccountingCharges", default=None) + """Efeito de Mudanças Contábeis.""" + + end_date: Optional[date] = FieldInfo(alias="endDate", default=None) + """Data de término do período fiscal ao qual a DRE se refere (YYYY-MM-DD).""" + + equity_income_result: Optional[float] = FieldInfo(alias="equityIncomeResult", default=None) + """Resultado de Equivalência Patrimonial.""" + + extraordinary_items: Optional[float] = FieldInfo(alias="extraordinaryItems", default=None) + """Itens Extraordinários.""" + + financial_expenses: Optional[float] = FieldInfo(alias="financialExpenses", default=None) + """Despesas Financeiras (valor positivo aqui, diferente de `interestExpense`).""" + + financial_income: Optional[float] = FieldInfo(alias="financialIncome", default=None) + """Receitas Financeiras.""" + + financial_result: Optional[float] = FieldInfo(alias="financialResult", default=None) + """Resultado Financeiro Líquido.""" + + gross_profit: Optional[float] = FieldInfo(alias="grossProfit", default=None) + """Lucro Bruto (Receita Líquida - CPV/CSP).""" + + income_before_statutory_participations_and_contributions: Optional[float] = FieldInfo( + alias="incomeBeforeStatutoryParticipationsAndContributions", default=None + ) + """Resultado Antes das Participações Estatutárias.""" + + income_before_tax: Optional[float] = FieldInfo(alias="incomeBeforeTax", default=None) + """Lucro Antes do Imposto de Renda e Contribuição Social (LAIR). + + EBIT + Resultado Financeiro. + """ + + income_tax_expense: Optional[float] = FieldInfo(alias="incomeTaxExpense", default=None) + """Imposto de Renda e Contribuição Social sobre o Lucro.""" + + insurance_operations: Optional[float] = FieldInfo(alias="insuranceOperations", default=None) + """Resultado de Operações de Seguros (específico para Seguradoras).""" + + interest_expense: Optional[float] = FieldInfo(alias="interestExpense", default=None) + """Despesas Financeiras (Juros pagos). Note que este campo é negativo.""" + + losses_due_to_non_recoverability_of_assets: Optional[float] = FieldInfo( + alias="lossesDueToNonRecoverabilityOfAssets", default=None + ) + """Perdas por Não Recuperabilidade de Ativos (Impairment).""" + + minority_interest: Optional[float] = FieldInfo(alias="minorityInterest", default=None) + """Participação de Acionistas Não Controladores (no Lucro Líquido).""" + + net_income: Optional[float] = FieldInfo(alias="netIncome", default=None) + """Lucro Líquido Consolidado do Período.""" + + net_income_applicable_to_common_shares: Optional[float] = FieldInfo( + alias="netIncomeApplicableToCommonShares", default=None + ) + """Lucro Líquido Atribuível aos Acionistas Controladores (Ações Ordinárias).""" + + net_income_from_continuing_ops: Optional[float] = FieldInfo(alias="netIncomeFromContinuingOps", default=None) + """Lucro Líquido das Operações Continuadas.""" + + non_recurring: Optional[float] = FieldInfo(alias="nonRecurring", default=None) + """Itens Não Recorrentes (pode incluir outras despesas/receitas operacionais).""" + + operating_income: Optional[float] = FieldInfo(alias="operatingIncome", default=None) + """Lucro Operacional (EBIT - Earnings Before Interest and Taxes). + + Lucro Bruto - Despesas Operacionais. 
+ """ + + other_items: Optional[float] = FieldInfo(alias="otherItems", default=None) + """Outros Itens.""" + + other_operating_expenses: Optional[float] = FieldInfo(alias="otherOperatingExpenses", default=None) + """Outras Despesas Operacionais.""" + + other_operating_income: Optional[float] = FieldInfo(alias="otherOperatingIncome", default=None) + """Outras Receitas Operacionais (detalhamento).""" + + other_operating_income_and_expenses: Optional[float] = FieldInfo( + alias="otherOperatingIncomeAndExpenses", default=None + ) + """Outras Receitas e Despesas Operacionais (agregado).""" + + profit_sharing_and_statutory_contributions: Optional[float] = FieldInfo( + alias="profitSharingAndStatutoryContributions", default=None + ) + """Participações nos Lucros e Contribuições Estatutárias.""" + + reinsurance_operations: Optional[float] = FieldInfo(alias="reinsuranceOperations", default=None) + """Resultado de Operações de Resseguros (específico para Seguradoras).""" + + research_development: Optional[float] = FieldInfo(alias="researchDevelopment", default=None) + """Despesas com Pesquisa e Desenvolvimento.""" + + sales_expenses: Optional[float] = FieldInfo(alias="salesExpenses", default=None) + """Despesas com Vendas (detalhamento, pode estar contido em SG&A).""" + + selling_general_administrative: Optional[float] = FieldInfo(alias="sellingGeneralAdministrative", default=None) + """Despesas com Vendas, Gerais e Administrativas.""" + + symbol: Optional[str] = None + """Ticker do ativo ao qual a DRE se refere.""" + + total_operating_expenses: Optional[float] = FieldInfo(alias="totalOperatingExpenses", default=None) + """Total das Despesas Operacionais (P&D + SG&A + Outras).""" + + total_other_income_expense_net: Optional[float] = FieldInfo(alias="totalOtherIncomeExpenseNet", default=None) + """Resultado Financeiro Líquido + Outras Receitas/Despesas.""" + + total_revenue: Optional[float] = FieldInfo(alias="totalRevenue", default=None) + """Receita Operacional Líquida.""" + + type: Optional[Literal["yearly", "quarterly"]] = None + """Indica a periodicidade da DRE: `yearly` (anual) ou `quarterly` (trimestral).""" + + updated_at: Optional[date] = FieldInfo(alias="updatedAt", default=None) + """ + Data da última atualização deste registro específico na fonte de dados + (YYYY-MM-DD). + """ diff --git a/src/brapi/types/quote_list_params.py b/src/brapi/types/quote_list_params.py new file mode 100644 index 0000000..9038560 --- /dev/null +++ b/src/brapi/types/quote_list_params.py @@ -0,0 +1,86 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing_extensions import Literal, Annotated, TypedDict + +from .._utils import PropertyInfo + +__all__ = ["QuoteListParams"] + + +class QuoteListParams(TypedDict, total=False): + token: str + """ + **Obrigatório caso não esteja adicionado como header "Authorization".** Seu + token de autenticação pessoal da API Brapi. + + **Formas de Envio:** + + 1. **Query Parameter:** Adicione `?token=SEU_TOKEN` ao final da URL. + 2. **HTTP Header:** Inclua o header `Authorization: Bearer SEU_TOKEN` na sua + requisição. + + Ambos os métodos são aceitos, mas pelo menos um deles deve ser utilizado. + Obtenha seu token em [brapi.dev/dashboard](https://brapi.dev/dashboard). + """ + + limit: int + """**Opcional.** Número máximo de ativos a serem retornados por página. + + O valor padrão pode variar. 
+ """ + + page: int + """ + **Opcional.** Número da página dos resultados a ser retornada, considerando o + `limit` especificado. Começa em 1. + """ + + search: str + """**Opcional.** Termo para buscar ativos por ticker (correspondência parcial). + + Ex: `PETR` encontrará `PETR4`, `PETR3`. + """ + + sector: Literal[ + "Retail Trade", + "Energy Minerals", + "Health Services", + "Utilities", + "Finance", + "Consumer Services", + "Consumer Non-Durables", + "Non-Energy Minerals", + "Commercial Services", + "Distribution Services", + "Transportation", + "Technology Services", + "Process Industries", + "Communications", + "Producer Manufacturing", + "Miscellaneous", + "Electronic Technology", + "Industrial Services", + "Health Technology", + "Consumer Durables", + ] + """**Opcional.** Filtra os resultados por setor de atuação da empresa. + + Utilize um dos valores retornados em `availableSectors`. + """ + + sort_by: Annotated[ + Literal["name", "close", "change", "change_abs", "volume", "market_cap_basic", "sector"], + PropertyInfo(alias="sortBy"), + ] + """**Opcional.** Campo pelo qual os resultados serão ordenados.""" + + sort_order: Annotated[Literal["asc", "desc"], PropertyInfo(alias="sortOrder")] + """**Opcional.** Direção da ordenação: `asc` (ascendente) ou `desc` (descendente). + + Requer que `sortBy` seja especificado. + """ + + type: Literal["stock", "fund", "bdr"] + """**Opcional.** Filtra os resultados por tipo de ativo.""" diff --git a/src/brapi/types/quote_list_response.py b/src/brapi/types/quote_list_response.py new file mode 100644 index 0000000..b1ca5d7 --- /dev/null +++ b/src/brapi/types/quote_list_response.py @@ -0,0 +1,99 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import List, Optional +from typing_extensions import Literal + +from pydantic import Field as FieldInfo + +from .._models import BaseModel + +__all__ = ["QuoteListResponse", "Index", "Stock"] + + +class Index(BaseModel): + name: Optional[str] = None + """Nome do índice (ex: `IBOVESPA`).""" + + stock: Optional[str] = None + """Ticker do índice (ex: `^BVSP`).""" + + +class Stock(BaseModel): + change: Optional[float] = None + """Variação percentual do preço em relação ao fechamento anterior.""" + + close: Optional[float] = None + """Preço de fechamento mais recente ou último preço negociado.""" + + logo: Optional[str] = None + """URL para a imagem do logo da empresa/ativo.""" + + market_cap: Optional[float] = None + """Capitalização de mercado (Preço x Quantidade de Ações). + + Pode ser nulo para FIIs ou outros tipos. + """ + + name: Optional[str] = None + """Nome do ativo ou empresa (ex: `PETROBRAS PN`).""" + + sector: Optional[str] = None + """Setor de atuação da empresa (ex: `Energy Minerals`, `Finance`). + + Pode ser nulo ou variar para FIIs. + """ + + stock: Optional[str] = None + """Ticker do ativo (ex: `PETR4`, `MXRF11`).""" + + type: Optional[Literal["stock", "fund", "bdr"]] = None + """ + Tipo do ativo: `stock` (Ação), `fund` (Fundo Imobiliário/FII), `bdr` (Brazilian + Depositary Receipt). + """ + + volume: Optional[int] = None + """Volume financeiro negociado no último pregão ou dia atual.""" + + +class QuoteListResponse(BaseModel): + available_sectors: Optional[List[str]] = FieldInfo(alias="availableSectors", default=None) + """ + Lista de todos os setores disponíveis que podem ser usados no parâmetro de + filtro `sector`. 
+ """ + + available_stock_types: Optional[List[Literal["stock", "fund", "bdr"]]] = FieldInfo( + alias="availableStockTypes", default=None + ) + """ + Lista dos tipos de ativos (`stock`, `fund`, `bdr`) disponíveis que podem ser + usados no parâmetro de filtro `type`. + """ + + current_page: Optional[int] = FieldInfo(alias="currentPage", default=None) + """Número da página atual retornada nos resultados.""" + + has_next_page: Optional[bool] = FieldInfo(alias="hasNextPage", default=None) + """ + Indica se existe uma próxima página de resultados (`true`) ou se esta é a última + página (`false`). + """ + + indexes: Optional[List[Index]] = None + """Lista resumida de índices relevantes (geralmente inclui IBOVESPA).""" + + items_per_page: Optional[int] = FieldInfo(alias="itemsPerPage", default=None) + """Número de itens (ativos) retornados por página (conforme `limit` ou padrão).""" + + stocks: Optional[List[Stock]] = None + """Lista paginada e filtrada dos ativos solicitados.""" + + total_count: Optional[int] = FieldInfo(alias="totalCount", default=None) + """ + Número total de ativos encontrados que correspondem aos filtros aplicados (sem + considerar a paginação). + """ + + total_pages: Optional[int] = FieldInfo(alias="totalPages", default=None) + """Número total de páginas existentes para a consulta/filtros aplicados.""" diff --git a/src/brapi/types/quote_retrieve_params.py b/src/brapi/types/quote_retrieve_params.py new file mode 100644 index 0000000..e219ac0 --- /dev/null +++ b/src/brapi/types/quote_retrieve_params.py @@ -0,0 +1,113 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing import List +from typing_extensions import Literal, TypedDict + +__all__ = ["QuoteRetrieveParams"] + + +class QuoteRetrieveParams(TypedDict, total=False): + token: str + """ + **Obrigatório caso não esteja adicionado como header "Authorization".** Seu + token de autenticação pessoal da API Brapi. + + **Formas de Envio:** + + 1. **Query Parameter:** Adicione `?token=SEU_TOKEN` ao final da URL. + 2. **HTTP Header:** Inclua o header `Authorization: Bearer SEU_TOKEN` na sua + requisição. + + Ambos os métodos são aceitos, mas pelo menos um deles deve ser utilizado. + Obtenha seu token em [brapi.dev/dashboard](https://brapi.dev/dashboard). + """ + + dividends: bool + """**Opcional.** Booleano (`true` ou `false`). + + Se `true`, inclui informações sobre dividendos e JCP (Juros sobre Capital + Próprio) pagos historicamente pelo ativo na chave `dividendsData`. + """ + + fundamental: bool + """**Opcional.** Booleano (`true` ou `false`). + + Se `true`, inclui dados fundamentalistas básicos na resposta, como Preço/Lucro + (P/L) e Lucro Por Ação (LPA). + + **Nota:** Para dados fundamentalistas mais completos, utilize o parâmetro + `modules`. + """ + + interval: Literal["1m", "2m", "5m", "15m", "30m", "60m", "90m", "1h", "1d", "5d", "1wk", "1mo", "3mo"] + """ + **Opcional.** Define a granularidade (intervalo) dos dados históricos de preço + (`historicalDataPrice`). Requer que `range` também seja especificado. + + **Valores Possíveis:** + + - `1m`, `2m`, `5m`, `15m`, `30m`, `60m`, `90m`, `1h`: Intervalos intraday + (minutos/horas). **Atenção:** Disponibilidade pode variar conforme o `range` e + o ativo. + - `1d`: Diário (padrão se `range` for especificado e `interval` omitido). + - `5d`: 5 dias. + - `1wk`: Semanal. + - `1mo`: Mensal. + - `3mo`: Trimestral. 
+ """ + + modules: List[ + Literal[ + "summaryProfile", + "balanceSheetHistory", + "defaultKeyStatistics", + "balanceSheetHistoryQuarterly", + "incomeStatementHistory", + "incomeStatementHistoryQuarterly", + "financialData", + "financialDataHistory", + "financialDataHistoryQuarterly", + "defaultKeyStatisticsHistory", + "defaultKeyStatisticsHistoryQuarterly", + "valueAddedHistory", + "valueAddedHistoryQuarterly", + "cashflowHistory", + "cashflowHistoryQuarterly", + ] + ] + """ + **Opcional.** Uma lista de módulos de dados adicionais, separados por vírgula + (`,`), para incluir na resposta. Permite buscar dados financeiros detalhados. + + **Exemplos:** + + - `modules=summaryProfile` (retorna perfil da empresa) + - `modules=balanceSheetHistory,incomeStatementHistory` (retorna histórico anual + do BP e DRE) + + Veja a descrição principal do endpoint para a lista completa de módulos e seus + conteúdos. + """ + + range: Literal["1d", "5d", "1mo", "3mo", "6mo", "1y", "2y", "5y", "10y", "ytd", "max"] + """ + **Opcional.** Define o período para os dados históricos de preço + (`historicalDataPrice`). Se omitido, apenas a cotação mais recente é retornada + (a menos que `interval` seja usado). + + **Valores Possíveis:** + + - `1d`: Último dia de pregão (intraday se `interval` for minutos/horas). + - `5d`: Últimos 5 dias. + - `1mo`: Último mês. + - `3mo`: Últimos 3 meses. + - `6mo`: Últimos 6 meses. + - `1y`: Último ano. + - `2y`: Últimos 2 anos. + - `5y`: Últimos 5 anos. + - `10y`: Últimos 10 anos. + - `ytd`: Desde o início do ano atual (Year-to-Date). + - `max`: Todo o período histórico disponível. + """ diff --git a/src/brapi/types/quote_retrieve_response.py b/src/brapi/types/quote_retrieve_response.py new file mode 100644 index 0000000..8f9e542 --- /dev/null +++ b/src/brapi/types/quote_retrieve_response.py @@ -0,0 +1,476 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import List, Optional +from datetime import datetime + +from pydantic import Field as FieldInfo + +from .._models import BaseModel +from .cashflow_entry import CashflowEntry +from .value_added_entry import ValueAddedEntry +from .balance_sheet_entry import BalanceSheetEntry +from .financial_data_entry import FinancialDataEntry +from .income_statement_entry import IncomeStatementEntry +from .default_key_statistics_entry import DefaultKeyStatisticsEntry + +__all__ = [ + "QuoteRetrieveResponse", + "Result", + "ResultDividendsData", + "ResultDividendsDataCashDividend", + "ResultDividendsDataStockDividend", + "ResultHistoricalDataPrice", + "ResultSummaryProfile", +] + + +class ResultDividendsDataCashDividend(BaseModel): + approved_on: Optional[datetime] = FieldInfo(alias="approvedOn", default=None) + """Data em que o pagamento do provento foi aprovado pela empresa. + + Pode ser uma estimativa em alguns casos. Formato ISO 8601. + """ + + asset_issued: Optional[str] = FieldInfo(alias="assetIssued", default=None) + """Ticker do ativo que pagou o provento (ex: `ITSA4`). + + Pode incluir sufixos específicos relacionados ao evento. + """ + + isin_code: Optional[str] = FieldInfo(alias="isinCode", default=None) + """ + Código ISIN (International Securities Identification Number) do ativo + relacionado ao provento. + """ + + label: Optional[str] = None + """Tipo do provento em dinheiro. + + Geralmente `DIVIDENDO` ou `JCP` (Juros sobre Capital Próprio). + """ + + last_date_prior: Optional[datetime] = FieldInfo(alias="lastDatePrior", default=None) + """Data Com (Ex-Date). 
+ + Último dia em que era necessário possuir o ativo para ter direito a receber este + provento. Pode ser uma estimativa. Formato ISO 8601. + """ + + payment_date: Optional[datetime] = FieldInfo(alias="paymentDate", default=None) + """Data efetiva em que o pagamento foi realizado (ou está previsto). + + Formato ISO 8601. + """ + + rate: Optional[float] = None + """Valor bruto do provento pago por unidade do ativo (por ação, por cota).""" + + related_to: Optional[str] = FieldInfo(alias="relatedTo", default=None) + """ + Descrição do período ou evento ao qual o provento se refere (ex: + `1º Trimestre/2023`, `Resultado 2022`). + """ + + remarks: Optional[str] = None + """Observações adicionais ou informações relevantes sobre o provento.""" + + +class ResultDividendsDataStockDividend(BaseModel): + approved_on: Optional[datetime] = FieldInfo(alias="approvedOn", default=None) + """Data em que o evento foi aprovado. Formato ISO 8601.""" + + asset_issued: Optional[str] = FieldInfo(alias="assetIssued", default=None) + """Ticker do ativo afetado pelo evento.""" + + complete_factor: Optional[str] = FieldInfo(alias="completeFactor", default=None) + """Descrição textual do fator (ex: `1 / 10`, `10 / 1`).""" + + factor: Optional[float] = None + """Fator numérico do evento. + + - **Bonificação:** Percentual (ex: 0.1 para 10%). + - **Desdobramento/Grupamento:** Fator multiplicativo ou divisor. + """ + + isin_code: Optional[str] = FieldInfo(alias="isinCode", default=None) + """Código ISIN do ativo.""" + + label: Optional[str] = None + """Tipo do evento: `DESDOBRAMENTO`, `GRUPAMENTO`, `BONIFICACAO`.""" + + last_date_prior: Optional[datetime] = FieldInfo(alias="lastDatePrior", default=None) + """Data Com (Ex-Date). + + Último dia para possuir o ativo nas condições antigas. Formato ISO 8601. + """ + + remarks: Optional[str] = None + """Observações adicionais sobre o evento.""" + + +class ResultDividendsData(BaseModel): + cash_dividends: Optional[List[ResultDividendsDataCashDividend]] = FieldInfo(alias="cashDividends", default=None) + """Lista de proventos pagos em dinheiro (Dividendos e JCP).""" + + stock_dividends: Optional[List[ResultDividendsDataStockDividend]] = FieldInfo(alias="stockDividends", default=None) + """Lista de eventos corporativos (Desdobramento, Grupamento, Bonificação).""" + + subscriptions: Optional[List[object]] = None + """Lista de eventos de subscrição de ações (estrutura não detalhada aqui).""" + + +class ResultHistoricalDataPrice(BaseModel): + adjusted_close: Optional[float] = FieldInfo(alias="adjustedClose", default=None) + """ + Preço de fechamento ajustado para proventos (dividendos, JCP, bonificações, + etc.) e desdobramentos/grupamentos. + """ + + close: Optional[float] = None + """Preço de fechamento do ativo no intervalo.""" + + date: Optional[int] = None + """ + Data do pregão ou do ponto de dados, representada como um timestamp UNIX (número + de segundos desde 1970-01-01 UTC). 
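As an illustration of consuming the dividend models above, the hedged helper below sums the gross cash amount (`rate`) per payment year; it assumes an already-parsed ResultDividendsData instance and skips entries with missing fields.

from collections import defaultdict
from typing import Dict

from brapi.types.quote_retrieve_response import ResultDividendsData

def cash_dividends_per_year(data: ResultDividendsData) -> Dict[int, float]:
    # Group cashDividends by the year of paymentDate and sum the gross rate per unit.
    totals: Dict[int, float] = defaultdict(float)
    for dividend in data.cash_dividends or []:
        if dividend.payment_date is not None and dividend.rate is not None:
            totals[dividend.payment_date.year] += dividend.rate
    return dict(totals)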
+ """ + + high: Optional[float] = None + """Preço máximo atingido pelo ativo no intervalo.""" + + low: Optional[float] = None + """Preço mínimo atingido pelo ativo no intervalo.""" + + open: Optional[float] = None + """Preço de abertura do ativo no intervalo (dia, semana, mês, etc.).""" + + volume: Optional[int] = None + """Volume financeiro negociado no intervalo.""" + + +class ResultSummaryProfile(BaseModel): + address1: Optional[str] = None + """Linha 1 do endereço da sede da empresa.""" + + address2: Optional[str] = None + """Linha 2 do endereço da sede da empresa (complemento).""" + + city: Optional[str] = None + """Cidade da sede da empresa.""" + + company_officers: Optional[List[object]] = FieldInfo(alias="companyOfficers", default=None) + """ + Lista de diretores e executivos principais da empresa (estrutura interna do + objeto não detalhada aqui). + """ + + country: Optional[str] = None + """País da sede da empresa.""" + + full_time_employees: Optional[int] = FieldInfo(alias="fullTimeEmployees", default=None) + """Número estimado de funcionários em tempo integral.""" + + industry: Optional[str] = None + """Nome da indústria em que a empresa atua.""" + + industry_disp: Optional[str] = FieldInfo(alias="industryDisp", default=None) + """Nome de exibição formatado para a indústria.""" + + industry_key: Optional[str] = FieldInfo(alias="industryKey", default=None) + """Chave interna ou código para a indústria.""" + + long_business_summary: Optional[str] = FieldInfo(alias="longBusinessSummary", default=None) + """Descrição longa e detalhada sobre as atividades e o negócio da empresa.""" + + phone: Optional[str] = None + """Número de telefone principal da empresa.""" + + sector: Optional[str] = None + """Nome do setor de atuação da empresa.""" + + sector_disp: Optional[str] = FieldInfo(alias="sectorDisp", default=None) + """Nome de exibição formatado para o setor.""" + + sector_key: Optional[str] = FieldInfo(alias="sectorKey", default=None) + """Chave interna ou código para o setor.""" + + state: Optional[str] = None + """Estado ou província da sede da empresa.""" + + website: Optional[str] = None + """URL do website oficial da empresa.""" + + zip: Optional[str] = None + """Código Postal (CEP) da sede da empresa.""" + + +class Result(BaseModel): + average_daily_volume10_day: Optional[float] = FieldInfo(alias="averageDailyVolume10Day", default=None) + """Média do volume financeiro diário negociado nos últimos 10 dias.""" + + average_daily_volume3_month: Optional[float] = FieldInfo(alias="averageDailyVolume3Month", default=None) + """Média do volume financeiro diário negociado nos últimos 3 meses.""" + + balance_sheet_history: Optional[List[BalanceSheetEntry]] = FieldInfo(alias="balanceSheetHistory", default=None) + """Histórico **anual** do Balanço Patrimonial. + + Retornado apenas se `modules` incluir `balanceSheetHistory`. + """ + + balance_sheet_history_quarterly: Optional[List[BalanceSheetEntry]] = FieldInfo( + alias="balanceSheetHistoryQuarterly", default=None + ) + """Histórico **trimestral** do Balanço Patrimonial. + + Retornado apenas se `modules` incluir `balanceSheetHistoryQuarterly`. + """ + + cashflow_history: Optional[List[CashflowEntry]] = FieldInfo(alias="cashflowHistory", default=None) + """Histórico **anual** da Demonstração do Fluxo de Caixa (DFC). + + Retornado apenas se `modules` incluir `cashflowHistory`. 
+ """ + + cashflow_history_quarterly: Optional[List[CashflowEntry]] = FieldInfo( + alias="cashflowHistoryQuarterly", default=None + ) + """Histórico **trimestral** da Demonstração do Fluxo de Caixa (DFC). + + Retornado apenas se `modules` incluir `cashflowHistoryQuarterly`. + """ + + currency: Optional[str] = None + """Moeda na qual os valores monetários são expressos (geralmente `BRL`).""" + + default_key_statistics: Optional[DefaultKeyStatisticsEntry] = FieldInfo(alias="defaultKeyStatistics", default=None) + """Principais estatísticas financeiras atuais/TTM. + + Retornado apenas se `modules` incluir `defaultKeyStatistics`. + """ + + default_key_statistics_history: Optional[List[DefaultKeyStatisticsEntry]] = FieldInfo( + alias="defaultKeyStatisticsHistory", default=None + ) + """Histórico **anual** das principais estatísticas. + + Retornado apenas se `modules` incluir `defaultKeyStatisticsHistory`. + """ + + default_key_statistics_history_quarterly: Optional[List[DefaultKeyStatisticsEntry]] = FieldInfo( + alias="defaultKeyStatisticsHistoryQuarterly", default=None + ) + """Histórico **trimestral** das principais estatísticas. + + Retornado apenas se `modules` incluir `defaultKeyStatisticsHistoryQuarterly`. + """ + + dividends_data: Optional[ResultDividendsData] = FieldInfo(alias="dividendsData", default=None) + """Objeto contendo informações sobre dividendos, JCP e outros eventos corporativos. + + Retornado apenas se `dividends=true` for especificado na requisição. + """ + + earnings_per_share: Optional[float] = FieldInfo(alias="earningsPerShare", default=None) + """Lucro Por Ação (LPA) dos últimos 12 meses (TTM). + + Retornado se `fundamental=true`. + """ + + fifty_two_week_high: Optional[float] = FieldInfo(alias="fiftyTwoWeekHigh", default=None) + """Preço máximo atingido nas últimas 52 semanas.""" + + fifty_two_week_high_change: Optional[float] = FieldInfo(alias="fiftyTwoWeekHighChange", default=None) + """Variação absoluta entre o preço atual e o preço máximo das últimas 52 semanas.""" + + fifty_two_week_high_change_percent: Optional[float] = FieldInfo(alias="fiftyTwoWeekHighChangePercent", default=None) + """ + Variação percentual entre o preço atual e o preço máximo das últimas 52 semanas. + """ + + fifty_two_week_low: Optional[float] = FieldInfo(alias="fiftyTwoWeekLow", default=None) + """Preço mínimo atingido nas últimas 52 semanas.""" + + fifty_two_week_low_change: Optional[float] = FieldInfo(alias="fiftyTwoWeekLowChange", default=None) + """Variação absoluta entre o preço atual e o preço mínimo das últimas 52 semanas.""" + + fifty_two_week_range: Optional[str] = FieldInfo(alias="fiftyTwoWeekRange", default=None) + """ + String formatada mostrando o intervalo de preço das últimas 52 semanas (Mínimo - + Máximo). + """ + + financial_data: Optional[FinancialDataEntry] = FieldInfo(alias="financialData", default=None) + """Dados financeiros e indicadores TTM. + + Retornado apenas se `modules` incluir `financialData`. + """ + + financial_data_history: Optional[List[FinancialDataEntry]] = FieldInfo(alias="financialDataHistory", default=None) + """Histórico **anual** de dados financeiros e indicadores. + + Retornado apenas se `modules` incluir `financialDataHistory`. + """ + + financial_data_history_quarterly: Optional[List[FinancialDataEntry]] = FieldInfo( + alias="financialDataHistoryQuarterly", default=None + ) + """Histórico **trimestral** de dados financeiros e indicadores. + + Retornado apenas se `modules` incluir `financialDataHistoryQuarterly`. 
+ """ + + historical_data_price: Optional[List[ResultHistoricalDataPrice]] = FieldInfo( + alias="historicalDataPrice", default=None + ) + """ + Array contendo a série histórica de preços, retornado apenas se os parâmetros + `range` e/ou `interval` forem especificados na requisição. + """ + + income_statement_history: Optional[List[IncomeStatementEntry]] = FieldInfo( + alias="incomeStatementHistory", default=None + ) + """Histórico **anual** da Demonstração do Resultado (DRE). + + Retornado apenas se `modules` incluir `incomeStatementHistory`. + """ + + income_statement_history_quarterly: Optional[List[IncomeStatementEntry]] = FieldInfo( + alias="incomeStatementHistoryQuarterly", default=None + ) + """Histórico **trimestral** da Demonstração do Resultado (DRE). + + Retornado apenas se `modules` incluir `incomeStatementHistoryQuarterly`. + """ + + logourl: Optional[str] = None + """URL da imagem do logo do ativo/empresa.""" + + long_name: Optional[str] = FieldInfo(alias="longName", default=None) + """Nome longo ou completo da empresa ou ativo.""" + + market_cap: Optional[float] = FieldInfo(alias="marketCap", default=None) + """Capitalização de mercado total do ativo (Preço Atual x Ações em Circulação).""" + + price_earnings: Optional[float] = FieldInfo(alias="priceEarnings", default=None) + """Indicador Preço/Lucro (P/L): Preço Atual / Lucro Por Ação (LPA) TTM. + + Retornado se `fundamental=true`. + """ + + regular_market_change: Optional[float] = FieldInfo(alias="regularMarketChange", default=None) + """Variação absoluta do preço no dia atual em relação ao fechamento anterior.""" + + regular_market_change_percent: Optional[float] = FieldInfo(alias="regularMarketChangePercent", default=None) + """Variação percentual do preço no dia atual em relação ao fechamento anterior.""" + + regular_market_day_high: Optional[float] = FieldInfo(alias="regularMarketDayHigh", default=None) + """Preço máximo atingido no dia de negociação atual.""" + + regular_market_day_low: Optional[float] = FieldInfo(alias="regularMarketDayLow", default=None) + """Preço mínimo atingido no dia de negociação atual.""" + + regular_market_day_range: Optional[str] = FieldInfo(alias="regularMarketDayRange", default=None) + """String formatada mostrando o intervalo de preço do dia (Mínimo - Máximo).""" + + regular_market_open: Optional[float] = FieldInfo(alias="regularMarketOpen", default=None) + """Preço de abertura no dia de negociação atual.""" + + regular_market_previous_close: Optional[float] = FieldInfo(alias="regularMarketPreviousClose", default=None) + """Preço de fechamento do pregão anterior.""" + + regular_market_price: Optional[float] = FieldInfo(alias="regularMarketPrice", default=None) + """Preço atual ou do último negócio registrado.""" + + regular_market_time: Optional[datetime] = FieldInfo(alias="regularMarketTime", default=None) + """Data e hora da última atualização da cotação (último negócio registrado). + + Formato ISO 8601. + """ + + regular_market_volume: Optional[float] = FieldInfo(alias="regularMarketVolume", default=None) + """Volume financeiro negociado no dia atual.""" + + short_name: Optional[str] = FieldInfo(alias="shortName", default=None) + """Nome curto ou abreviado da empresa ou ativo.""" + + summary_profile: Optional[ResultSummaryProfile] = FieldInfo(alias="summaryProfile", default=None) + """Resumo do perfil da empresa. + + Retornado apenas se `modules` incluir `summaryProfile`. 
+ """ + + symbol: Optional[str] = None + """Ticker (símbolo) do ativo (ex: `PETR4`, `^BVSP`).""" + + two_hundred_day_average: Optional[float] = FieldInfo(alias="twoHundredDayAverage", default=None) + """Média móvel simples dos preços de fechamento dos últimos 200 dias.""" + + two_hundred_day_average_change: Optional[float] = FieldInfo(alias="twoHundredDayAverageChange", default=None) + """Variação absoluta entre o preço atual e a média de 200 dias.""" + + two_hundred_day_average_change_percent: Optional[float] = FieldInfo( + alias="twoHundredDayAverageChangePercent", default=None + ) + """Variação percentual entre o preço atual e a média de 200 dias.""" + + updated_at: Optional[datetime] = FieldInfo(alias="updatedAt", default=None) + """ + Timestamp da última atualização dos dados do índice na fonte (aplicável + principalmente a índices, como `^BVSP`). Formato ISO 8601. + """ + + used_interval: Optional[str] = FieldInfo(alias="usedInterval", default=None) + """ + O intervalo (`interval`) efetivamente utilizado pela API para retornar os dados + históricos, caso solicitado. + """ + + used_range: Optional[str] = FieldInfo(alias="usedRange", default=None) + """ + O período (`range`) efetivamente utilizado pela API para retornar os dados + históricos, caso solicitado. + """ + + valid_intervals: Optional[List[str]] = FieldInfo(alias="validIntervals", default=None) + """ + Lista dos valores válidos que podem ser utilizados no parâmetro `interval` para + este ativo específico. + """ + + valid_ranges: Optional[List[str]] = FieldInfo(alias="validRanges", default=None) + """ + Lista dos valores válidos que podem ser utilizados no parâmetro `range` para + este ativo específico. + """ + + value_added_history: Optional[List[ValueAddedEntry]] = FieldInfo(alias="valueAddedHistory", default=None) + """Histórico **anual** da Demonstração do Valor Adicionado (DVA). + + Retornado apenas se `modules` incluir `valueAddedHistory`. + """ + + value_added_history_quarterly: Optional[List[ValueAddedEntry]] = FieldInfo( + alias="valueAddedHistoryQuarterly", default=None + ) + """Histórico **trimestral** da Demonstração do Valor Adicionado (DVA). + + Retornado apenas se `modules` incluir `valueAddedHistoryQuarterly`. + """ + + +class QuoteRetrieveResponse(BaseModel): + requested_at: Optional[datetime] = FieldInfo(alias="requestedAt", default=None) + """Timestamp indicando quando a requisição foi recebida pelo servidor. + + Formato ISO 8601. + """ + + results: Optional[List[Result]] = None + """Array contendo os resultados detalhados para cada ticker solicitado.""" + + took: Optional[str] = None + """ + Tempo aproximado que o servidor levou para processar a requisição, em formato de + string (ex: `746ms`). + """ diff --git a/src/brapi/types/v2/__init__.py b/src/brapi/types/v2/__init__.py new file mode 100644 index 0000000..febc9e5 --- /dev/null +++ b/src/brapi/types/v2/__init__.py @@ -0,0 +1,20 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from __future__ import annotations + +from .crypto_retrieve_params import CryptoRetrieveParams as CryptoRetrieveParams +from .crypto_retrieve_response import CryptoRetrieveResponse as CryptoRetrieveResponse +from .currency_retrieve_params import CurrencyRetrieveParams as CurrencyRetrieveParams +from .inflation_retrieve_params import InflationRetrieveParams as InflationRetrieveParams +from .currency_retrieve_response import CurrencyRetrieveResponse as CurrencyRetrieveResponse +from .prime_rate_retrieve_params import PrimeRateRetrieveParams as PrimeRateRetrieveParams +from .inflation_retrieve_response import InflationRetrieveResponse as InflationRetrieveResponse +from .crypto_list_available_params import CryptoListAvailableParams as CryptoListAvailableParams +from .prime_rate_retrieve_response import PrimeRateRetrieveResponse as PrimeRateRetrieveResponse +from .crypto_list_available_response import CryptoListAvailableResponse as CryptoListAvailableResponse +from .currency_list_available_params import CurrencyListAvailableParams as CurrencyListAvailableParams +from .inflation_list_available_params import InflationListAvailableParams as InflationListAvailableParams +from .currency_list_available_response import CurrencyListAvailableResponse as CurrencyListAvailableResponse +from .prime_rate_list_available_params import PrimeRateListAvailableParams as PrimeRateListAvailableParams +from .inflation_list_available_response import InflationListAvailableResponse as InflationListAvailableResponse +from .prime_rate_list_available_response import PrimeRateListAvailableResponse as PrimeRateListAvailableResponse diff --git a/src/brapi/types/v2/crypto_list_available_params.py b/src/brapi/types/v2/crypto_list_available_params.py new file mode 100644 index 0000000..0846d0d --- /dev/null +++ b/src/brapi/types/v2/crypto_list_available_params.py @@ -0,0 +1,31 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing_extensions import TypedDict + +__all__ = ["CryptoListAvailableParams"] + + +class CryptoListAvailableParams(TypedDict, total=False): + token: str + """ + **Obrigatório caso não esteja adicionado como header "Authorization".** Seu + token de autenticação pessoal da API Brapi. + + **Formas de Envio:** + + 1. **Query Parameter:** Adicione `?token=SEU_TOKEN` ao final da URL. + 2. **HTTP Header:** Inclua o header `Authorization: Bearer SEU_TOKEN` na sua + requisição. + + Ambos os métodos são aceitos, mas pelo menos um deles deve ser utilizado. + Obtenha seu token em [brapi.dev/dashboard](https://brapi.dev/dashboard). + """ + + search: str + """ + **Opcional.** Termo para filtrar a lista de siglas de criptomoedas + (correspondência parcial, case-insensitive). Se omitido, retorna todas as + siglas. + """ diff --git a/src/brapi/types/v2/crypto_list_available_response.py b/src/brapi/types/v2/crypto_list_available_response.py new file mode 100644 index 0000000..5951e9a --- /dev/null +++ b/src/brapi/types/v2/crypto_list_available_response.py @@ -0,0 +1,15 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import List, Optional + +from ..._models import BaseModel + +__all__ = ["CryptoListAvailableResponse"] + + +class CryptoListAvailableResponse(BaseModel): + coins: Optional[List[str]] = None + """ + Lista de siglas (tickers) das criptomoedas disponíveis (ex: `BTC`, `ETH`, + `LTC`). 
+ """ diff --git a/src/brapi/types/v2/crypto_retrieve_params.py b/src/brapi/types/v2/crypto_retrieve_params.py new file mode 100644 index 0000000..cbd226b --- /dev/null +++ b/src/brapi/types/v2/crypto_retrieve_params.py @@ -0,0 +1,59 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing_extensions import Literal, Required, TypedDict + +__all__ = ["CryptoRetrieveParams"] + + +class CryptoRetrieveParams(TypedDict, total=False): + coin: Required[str] + """ + **Obrigatório.** Uma ou mais siglas (tickers) de criptomoedas que você deseja + consultar. Separe múltiplas siglas por vírgula (`,`). + + - **Exemplos:** `BTC`, `ETH,ADA`, `SOL`. + """ + + token: str + """ + **Obrigatório caso não esteja adicionado como header "Authorization".** Seu + token de autenticação pessoal da API Brapi. + + **Formas de Envio:** + + 1. **Query Parameter:** Adicione `?token=SEU_TOKEN` ao final da URL. + 2. **HTTP Header:** Inclua o header `Authorization: Bearer SEU_TOKEN` na sua + requisição. + + Ambos os métodos são aceitos, mas pelo menos um deles deve ser utilizado. + Obtenha seu token em [brapi.dev/dashboard](https://brapi.dev/dashboard). + """ + + currency: str + """ + **Opcional.** A sigla da moeda fiduciária na qual a cotação da(s) criptomoeda(s) + deve ser retornada. Se omitido, o padrão é `BRL` (Real Brasileiro). + """ + + interval: Literal["1m", "2m", "5m", "15m", "30m", "60m", "90m", "1h", "1d", "5d", "1wk", "1mo", "3mo"] + """ + **Opcional.** Define a granularidade (intervalo) dos dados históricos de preço + (`historicalDataPrice`). Requer que `range` também seja especificado. Funciona + de forma análoga ao endpoint de ações. + + - Valores: `1m`, `2m`, `5m`, `15m`, `30m`, `60m`, `90m`, `1h`, `1d`, `5d`, + `1wk`, `1mo`, `3mo`. + """ + + range: Literal["1d", "5d", "1mo", "3mo", "6mo", "1y", "2y", "5y", "10y", "ytd", "max"] + """ + **Opcional.** Define o período para os dados históricos de preço + (`historicalDataPrice`). Funciona de forma análoga ao endpoint de ações. Se + omitido, apenas a cotação mais recente é retornada (a menos que `interval` seja + usado). + + - Valores: `1d`, `5d`, `1mo`, `3mo`, `6mo`, `1y`, `2y`, `5y`, `10y`, `ytd`, + `max`. + """ diff --git a/src/brapi/types/v2/crypto_retrieve_response.py b/src/brapi/types/v2/crypto_retrieve_response.py new file mode 100644 index 0000000..11d50b7 --- /dev/null +++ b/src/brapi/types/v2/crypto_retrieve_response.py @@ -0,0 +1,117 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from typing import List, Optional +from datetime import datetime + +from pydantic import Field as FieldInfo + +from ..._models import BaseModel + +__all__ = ["CryptoRetrieveResponse", "Coin", "CoinHistoricalDataPrice"] + + +class CoinHistoricalDataPrice(BaseModel): + adjusted_close: Optional[float] = FieldInfo(alias="adjustedClose", default=None) + """Preço de fechamento ajustado (geralmente igual ao `close` para cripto).""" + + close: Optional[float] = None + """Preço de fechamento da criptomoeda no intervalo.""" + + date: Optional[int] = None + """Data do ponto de dados, representada como um timestamp UNIX.""" + + high: Optional[float] = None + """Preço máximo atingido no intervalo.""" + + low: Optional[float] = None + """Preço mínimo atingido no intervalo.""" + + open: Optional[float] = None + """Preço de abertura da criptomoeda no intervalo.""" + + volume: Optional[int] = None + """ + Volume negociado no intervalo (na criptomoeda ou na moeda de referência, + verificar contexto). + """ + + +class Coin(BaseModel): + coin: Optional[str] = None + """Sigla (ticker) da criptomoeda (ex: `BTC`, `ETH`).""" + + coin_image_url: Optional[str] = FieldInfo(alias="coinImageUrl", default=None) + """URL da imagem do logo da criptomoeda.""" + + coin_name: Optional[str] = FieldInfo(alias="coinName", default=None) + """Nome completo da criptomoeda (ex: `Bitcoin`, `Ethereum`).""" + + currency: Optional[str] = None + """Sigla da moeda fiduciária na qual os preços estão cotados (ex: `BRL`, `USD`).""" + + currency_rate_from_usd: Optional[float] = FieldInfo(alias="currencyRateFromUSD", default=None) + """Taxa de câmbio da `currency` em relação ao USD (Dólar Americano). + + `1 USD = X currency`. + """ + + historical_data_price: Optional[List[CoinHistoricalDataPrice]] = FieldInfo( + alias="historicalDataPrice", default=None + ) + """ + Array contendo a série histórica de preços, retornado se `range` ou `interval` + forem especificados. + """ + + market_cap: Optional[int] = FieldInfo(alias="marketCap", default=None) + """Capitalização de mercado da criptomoeda na `currency` especificada.""" + + regular_market_change: Optional[float] = FieldInfo(alias="regularMarketChange", default=None) + """Variação absoluta do preço nas últimas 24 horas (ou período relevante).""" + + regular_market_change_percent: Optional[float] = FieldInfo(alias="regularMarketChangePercent", default=None) + """Variação percentual do preço nas últimas 24 horas (ou período relevante).""" + + regular_market_day_high: Optional[float] = FieldInfo(alias="regularMarketDayHigh", default=None) + """Preço máximo nas últimas 24 horas (ou período relevante).""" + + regular_market_day_low: Optional[float] = FieldInfo(alias="regularMarketDayLow", default=None) + """Preço mínimo nas últimas 24 horas (ou período relevante).""" + + regular_market_day_range: Optional[str] = FieldInfo(alias="regularMarketDayRange", default=None) + """ + String formatada mostrando o intervalo de preço das últimas 24h (Mínimo - + Máximo). + """ + + regular_market_price: Optional[float] = FieldInfo(alias="regularMarketPrice", default=None) + """Preço atual da criptomoeda na `currency` especificada.""" + + regular_market_time: Optional[datetime] = FieldInfo(alias="regularMarketTime", default=None) + """Timestamp da última atualização da cotação. 
Formato ISO 8601.""" + + regular_market_volume: Optional[int] = FieldInfo(alias="regularMarketVolume", default=None) + """Volume negociado nas últimas 24 horas (na `currency` especificada).""" + + used_interval: Optional[str] = FieldInfo(alias="usedInterval", default=None) + """ + O intervalo (`interval`) efetivamente utilizado para os dados históricos, se + solicitado. + """ + + used_range: Optional[str] = FieldInfo(alias="usedRange", default=None) + """ + O período (`range`) efetivamente utilizado para os dados históricos, se + solicitado. + """ + + valid_intervals: Optional[List[str]] = FieldInfo(alias="validIntervals", default=None) + """Lista dos valores válidos para o parâmetro `interval` nesta criptomoeda.""" + + valid_ranges: Optional[List[str]] = FieldInfo(alias="validRanges", default=None) + """Lista dos valores válidos para o parâmetro `range` nesta criptomoeda.""" + + +class CryptoRetrieveResponse(BaseModel): + coins: Optional[List[Coin]] = None + """Array contendo os resultados detalhados para cada criptomoeda solicitada.""" diff --git a/src/brapi/types/v2/currency_list_available_params.py b/src/brapi/types/v2/currency_list_available_params.py new file mode 100644 index 0000000..8ce9441 --- /dev/null +++ b/src/brapi/types/v2/currency_list_available_params.py @@ -0,0 +1,30 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing_extensions import TypedDict + +__all__ = ["CurrencyListAvailableParams"] + + +class CurrencyListAvailableParams(TypedDict, total=False): + token: str + """ + **Obrigatório caso não esteja adicionado como header "Authorization".** Seu + token de autenticação pessoal da API Brapi. + + **Formas de Envio:** + + 1. **Query Parameter:** Adicione `?token=SEU_TOKEN` ao final da URL. + 2. **HTTP Header:** Inclua o header `Authorization: Bearer SEU_TOKEN` na sua + requisição. + + Ambos os métodos são aceitos, mas pelo menos um deles deve ser utilizado. + Obtenha seu token em [brapi.dev/dashboard](https://brapi.dev/dashboard). + """ + + search: str + """ + **Opcional.** Termo para filtrar a lista pelo nome da moeda (correspondência + parcial, case-insensitive). + """ diff --git a/src/brapi/types/v2/currency_list_available_response.py b/src/brapi/types/v2/currency_list_available_response.py new file mode 100644 index 0000000..2a515fc --- /dev/null +++ b/src/brapi/types/v2/currency_list_available_response.py @@ -0,0 +1,24 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import List, Optional + +from ..._models import BaseModel + +__all__ = ["CurrencyListAvailableResponse", "Currency"] + + +class Currency(BaseModel): + currency: Optional[str] = None + """ + Nome da moeda ou par de moedas suportado (ex: `Dólar Americano/Real Brasileiro`, + `Euro/Real Brasileiro`). A sigla pode ser extraída deste nome ou consultada em + documentação adicional. + """ + + +class CurrencyListAvailableResponse(BaseModel): + currencies: Optional[List[Currency]] = None + """ + Lista de objetos, cada um contendo o nome de uma moeda fiduciária ou par + suportado pela API. + """ diff --git a/src/brapi/types/v2/currency_retrieve_params.py b/src/brapi/types/v2/currency_retrieve_params.py new file mode 100644 index 0000000..8ff1825 --- /dev/null +++ b/src/brapi/types/v2/currency_retrieve_params.py @@ -0,0 +1,35 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
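The Coin model above quotes prices in `currency` and exposes currencyRateFromUSD with the convention `1 USD = X currency`, so dividing the quoted price by that rate gives an approximate USD price. A hedged sketch of that conversion (not an official SDK helper):

from typing import Optional

from brapi.types.v2.crypto_retrieve_response import Coin

def price_in_usd(coin: Coin) -> Optional[float]:
    # Returns None when the price or the exchange rate is missing or zero.
    if coin.regular_market_price is None or not coin.currency_rate_from_usd:
        return None
    return coin.regular_market_price / coin.currency_rate_from_usd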
+ +from __future__ import annotations + +from typing_extensions import Required, TypedDict + +__all__ = ["CurrencyRetrieveParams"] + + +class CurrencyRetrieveParams(TypedDict, total=False): + currency: Required[str] + """ + **Obrigatório.** Uma lista de um ou mais pares de moedas a serem consultados, + separados por vírgula (`,`). + + - **Formato:** `MOEDA_ORIGEM-MOEDA_DESTINO` (ex: `USD-BRL`). + - **Disponibilidade:** Consulte os pares válidos usando o endpoint + [`/api/v2/currency/available`](#/Moedas/getAvailableCurrencies). + - **Exemplo:** `USD-BRL,EUR-BRL,BTC-BRL` + """ + + token: str + """ + **Obrigatório caso não esteja adicionado como header "Authorization".** Seu + token de autenticação pessoal da API Brapi. + + **Formas de Envio:** + + 1. **Query Parameter:** Adicione `?token=SEU_TOKEN` ao final da URL. + 2. **HTTP Header:** Inclua o header `Authorization: Bearer SEU_TOKEN` na sua + requisição. + + Ambos os métodos são aceitos, mas pelo menos um deles deve ser utilizado. + Obtenha seu token em [brapi.dev/dashboard](https://brapi.dev/dashboard). + """ diff --git a/src/brapi/types/v2/currency_retrieve_response.py b/src/brapi/types/v2/currency_retrieve_response.py new file mode 100644 index 0000000..7cc09b5 --- /dev/null +++ b/src/brapi/types/v2/currency_retrieve_response.py @@ -0,0 +1,84 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import List + +from pydantic import Field as FieldInfo + +from ..._models import BaseModel + +__all__ = ["CurrencyRetrieveResponse", "Currency"] + + +class Currency(BaseModel): + ask_price: str = FieldInfo(alias="askPrice") + """ + **Preço de Venda (Ask):** Preço atual pelo qual o mercado está disposto a vender + a moeda de origem (`fromCurrency`) recebendo a moeda de destino (`toCurrency`). + Formato String. + """ + + bid_price: str = FieldInfo(alias="bidPrice") + """ + **Preço de Compra (Bid):** Preço atual pelo qual o mercado está disposto a + comprar a moeda de origem (`fromCurrency`) pagando com a moeda de destino + (`toCurrency`). Formato String. + """ + + bid_variation: str = FieldInfo(alias="bidVariation") + """ + **Variação Absoluta (Bid):** Mudança absoluta no preço de compra (bid) desde o + último fechamento ou período de referência. Formato String. + """ + + from_currency: str = FieldInfo(alias="fromCurrency") + """**Moeda de Origem:** Sigla da moeda base do par (ex: `USD` em `USD-BRL`).""" + + high: str + """ + **Máxima:** Preço mais alto atingido pelo par no período recente (geralmente + diário). Formato String. + """ + + low: str + """ + **Mínima:** Preço mais baixo atingido pelo par no período recente (geralmente + diário). Formato String. + """ + + name: str + """ + **Nome do Par:** Nome descritivo do par de moedas (ex: + `Dólar Americano/Real Brasileiro`). + """ + + percentage_change: str = FieldInfo(alias="percentageChange") + """ + **Variação Percentual:** Mudança percentual no preço do par desde o último + fechamento ou período de referência. Formato String. + """ + + to_currency: str = FieldInfo(alias="toCurrency") + """ + **Moeda de Destino:** Sigla da moeda de cotação do par (ex: `BRL` em `USD-BRL`). + """ + + updated_at_date: str = FieldInfo(alias="updatedAtDate") + """ + **Data da Atualização:** Data e hora da última atualização da cotação, formatada + de forma legível (`YYYY-MM-DD HH:MM:SS`). 
+ """ + + updated_at_timestamp: str = FieldInfo(alias="updatedAtTimestamp") + """ + **Timestamp da Atualização:** Data e hora da última atualização da cotação, + representada como um **timestamp UNIX** (string contendo o número de segundos + desde 1970-01-01 UTC). + """ + + +class CurrencyRetrieveResponse(BaseModel): + currency: List[Currency] + """ + Array contendo os objetos `CurrencyQuote`, um para cada par de moeda válido + solicitado no parâmetro `currency`. + """ diff --git a/src/brapi/types/v2/inflation_list_available_params.py b/src/brapi/types/v2/inflation_list_available_params.py new file mode 100644 index 0000000..822dc76 --- /dev/null +++ b/src/brapi/types/v2/inflation_list_available_params.py @@ -0,0 +1,30 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing_extensions import TypedDict + +__all__ = ["InflationListAvailableParams"] + + +class InflationListAvailableParams(TypedDict, total=False): + token: str + """ + **Obrigatório caso não esteja adicionado como header "Authorization".** Seu + token de autenticação pessoal da API Brapi. + + **Formas de Envio:** + + 1. **Query Parameter:** Adicione `?token=SEU_TOKEN` ao final da URL. + 2. **HTTP Header:** Inclua o header `Authorization: Bearer SEU_TOKEN` na sua + requisição. + + Ambos os métodos são aceitos, mas pelo menos um deles deve ser utilizado. + Obtenha seu token em [brapi.dev/dashboard](https://brapi.dev/dashboard). + """ + + search: str + """ + **Opcional.** Termo para filtrar a lista pelo nome do país (correspondência + parcial, case-insensitive). Se omitido, retorna todos os países. + """ diff --git a/src/brapi/types/v2/inflation_list_available_response.py b/src/brapi/types/v2/inflation_list_available_response.py new file mode 100644 index 0000000..971e902 --- /dev/null +++ b/src/brapi/types/v2/inflation_list_available_response.py @@ -0,0 +1,15 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import List, Optional + +from ..._models import BaseModel + +__all__ = ["InflationListAvailableResponse"] + + +class InflationListAvailableResponse(BaseModel): + countries: Optional[List[str]] = None + """ + Lista de nomes de países (em minúsculas) para os quais há dados de inflação + disponíveis (ex: `brazil`, `usa`, `argentina`). + """ diff --git a/src/brapi/types/v2/inflation_retrieve_params.py b/src/brapi/types/v2/inflation_retrieve_params.py new file mode 100644 index 0000000..550be1e --- /dev/null +++ b/src/brapi/types/v2/inflation_retrieve_params.py @@ -0,0 +1,63 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing import Union +from datetime import date +from typing_extensions import Literal, Annotated, TypedDict + +from ..._utils import PropertyInfo + +__all__ = ["InflationRetrieveParams"] + + +class InflationRetrieveParams(TypedDict, total=False): + token: str + """ + **Obrigatório caso não esteja adicionado como header "Authorization".** Seu + token de autenticação pessoal da API Brapi. + + **Formas de Envio:** + + 1. **Query Parameter:** Adicione `?token=SEU_TOKEN` ao final da URL. + 2. **HTTP Header:** Inclua o header `Authorization: Bearer SEU_TOKEN` na sua + requisição. + + Ambos os métodos são aceitos, mas pelo menos um deles deve ser utilizado. + Obtenha seu token em [brapi.dev/dashboard](https://brapi.dev/dashboard). 
+ """ + + country: str + """**Opcional.** Nome do país para o qual buscar os dados de inflação. + + Use nomes em minúsculas. O padrão é `brazil`. Consulte + `/api/v2/inflation/available` para a lista de países suportados. + """ + + end: Annotated[Union[str, date], PropertyInfo(format="iso8601")] + """ + **Opcional.** Data final do período desejado para os dados históricos, no + formato `DD/MM/YYYY`. Requerido se `start` for especificado. + """ + + historical: bool + """**Opcional.** Booleano (`true` ou `false`). + + Define se dados históricos devem ser incluídos. O comportamento exato em + conjunto com `start`/`end` deve ser verificado. Padrão: `false`. + """ + + sort_by: Annotated[Literal["date", "value"], PropertyInfo(alias="sortBy")] + """**Opcional.** Campo pelo qual os resultados da inflação serão ordenados.""" + + sort_order: Annotated[Literal["asc", "desc"], PropertyInfo(alias="sortOrder")] + """**Opcional.** Direção da ordenação: `asc` (ascendente) ou `desc` (descendente). + + Padrão: `desc`. Requer que `sortBy` seja especificado. + """ + + start: Annotated[Union[str, date], PropertyInfo(format="iso8601")] + """ + **Opcional.** Data de início do período desejado para os dados históricos, no + formato `DD/MM/YYYY`. Requerido se `end` for especificado. + """ diff --git a/src/brapi/types/v2/inflation_retrieve_response.py b/src/brapi/types/v2/inflation_retrieve_response.py new file mode 100644 index 0000000..864488a --- /dev/null +++ b/src/brapi/types/v2/inflation_retrieve_response.py @@ -0,0 +1,34 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import List, Optional + +from pydantic import Field as FieldInfo + +from ..._models import BaseModel + +__all__ = ["InflationRetrieveResponse", "Inflation"] + + +class Inflation(BaseModel): + date: Optional[str] = None + """Data da medição da inflação, no formato `DD/MM/YYYY`.""" + + epoch_date: Optional[int] = FieldInfo(alias="epochDate", default=None) + """ + Timestamp UNIX (número de segundos desde 1970-01-01 UTC) correspondente à + `date`. + """ + + value: Optional[str] = None + """ + Valor do índice de inflação para a data especificada (formato string, pode + conter `%` ou ser apenas numérico). + """ + + +class InflationRetrieveResponse(BaseModel): + inflation: Optional[List[Inflation]] = None + """ + Array contendo os registros históricos de inflação para o país e período + solicitados. + """ diff --git a/src/brapi/types/v2/prime_rate_list_available_params.py b/src/brapi/types/v2/prime_rate_list_available_params.py new file mode 100644 index 0000000..a13a836 --- /dev/null +++ b/src/brapi/types/v2/prime_rate_list_available_params.py @@ -0,0 +1,30 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing_extensions import TypedDict + +__all__ = ["PrimeRateListAvailableParams"] + + +class PrimeRateListAvailableParams(TypedDict, total=False): + token: str + """ + **Obrigatório caso não esteja adicionado como header "Authorization".** Seu + token de autenticação pessoal da API Brapi. + + **Formas de Envio:** + + 1. **Query Parameter:** Adicione `?token=SEU_TOKEN` ao final da URL. + 2. **HTTP Header:** Inclua o header `Authorization: Bearer SEU_TOKEN` na sua + requisição. + + Ambos os métodos são aceitos, mas pelo menos um deles deve ser utilizado. + Obtenha seu token em [brapi.dev/dashboard](https://brapi.dev/dashboard). 
+ """ + + search: str + """**Opcional.** Termo para filtrar a lista de países por nome. + + Retorna países cujos nomes contenham o termo especificado (case insensitive). + """ diff --git a/src/brapi/types/v2/prime_rate_list_available_response.py b/src/brapi/types/v2/prime_rate_list_available_response.py new file mode 100644 index 0000000..4cfacdd --- /dev/null +++ b/src/brapi/types/v2/prime_rate_list_available_response.py @@ -0,0 +1,15 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import List, Optional + +from ..._models import BaseModel + +__all__ = ["PrimeRateListAvailableResponse"] + + +class PrimeRateListAvailableResponse(BaseModel): + countries: Optional[List[str]] = None + """ + Lista de países com dados de taxa básica de juros (SELIC) disponíveis para + consulta. + """ diff --git a/src/brapi/types/v2/prime_rate_retrieve_params.py b/src/brapi/types/v2/prime_rate_retrieve_params.py new file mode 100644 index 0000000..0fb3d75 --- /dev/null +++ b/src/brapi/types/v2/prime_rate_retrieve_params.py @@ -0,0 +1,67 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing import Union +from datetime import date +from typing_extensions import Literal, Annotated, TypedDict + +from ..._utils import PropertyInfo + +__all__ = ["PrimeRateRetrieveParams"] + + +class PrimeRateRetrieveParams(TypedDict, total=False): + token: str + """ + **Obrigatório caso não esteja adicionado como header "Authorization".** Seu + token de autenticação pessoal da API Brapi. + + **Formas de Envio:** + + 1. **Query Parameter:** Adicione `?token=SEU_TOKEN` ao final da URL. + 2. **HTTP Header:** Inclua o header `Authorization: Bearer SEU_TOKEN` na sua + requisição. + + Ambos os métodos são aceitos, mas pelo menos um deles deve ser utilizado. + Obtenha seu token em [brapi.dev/dashboard](https://brapi.dev/dashboard). + """ + + country: str + """ + **Opcional.** O país do qual você deseja obter informações sobre a taxa básica + de juros. Por padrão, o país é definido como brazil. Você pode consultar a lista + de países disponíveis através do endpoint `/api/v2/prime-rate/available`. + """ + + end: Annotated[Union[str, date], PropertyInfo(format="iso8601")] + """**Opcional.** Data final do período para busca no formato DD/MM/YYYY. + + Por padrão é a data atual. Útil quando `historical=true` para restringir o + período da série histórica. + """ + + historical: bool + """**Opcional.** Define se os dados históricos serão retornados. + + Se definido como `true`, retorna a série histórica completa. Se `false` (padrão) + ou omitido, retorna apenas o valor mais recente. + """ + + sort_by: Annotated[Literal["date", "value"], PropertyInfo(alias="sortBy")] + """**Opcional.** Campo pelo qual os resultados serão ordenados. + + Por padrão, ordena por `date` (data). + """ + + sort_order: Annotated[Literal["asc", "desc"], PropertyInfo(alias="sortOrder")] + """ + **Opcional.** Define se a ordenação será crescente (`asc`) ou decrescente + (`desc`). Por padrão, é `desc` (decrescente). + """ + + start: Annotated[Union[str, date], PropertyInfo(format="iso8601")] + """**Opcional.** Data inicial do período para busca no formato DD/MM/YYYY. + + Útil quando `historical=true` para restringir o período da série histórica. 
+ """ diff --git a/src/brapi/types/v2/prime_rate_retrieve_response.py b/src/brapi/types/v2/prime_rate_retrieve_response.py new file mode 100644 index 0000000..967860e --- /dev/null +++ b/src/brapi/types/v2/prime_rate_retrieve_response.py @@ -0,0 +1,28 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import List, Optional + +from pydantic import Field as FieldInfo + +from ..._models import BaseModel + +__all__ = ["PrimeRateRetrieveResponse", "PrimeRate"] + + +class PrimeRate(BaseModel): + date: Optional[str] = None + """Data do registro no formato DD/MM/YYYY.""" + + epoch_date: Optional[int] = FieldInfo(alias="epochDate", default=None) + """Timestamp em milissegundos (formato epoch) correspondente à data do registro.""" + + value: Optional[str] = None + """Valor da taxa básica de juros (SELIC) para a data correspondente.""" + + +class PrimeRateRetrieveResponse(BaseModel): + prime_rate: Optional[List[PrimeRate]] = FieldInfo(alias="prime-rate", default=None) + """ + Array contendo os registros históricos de taxa básica de juros (SELIC) para o + país e período solicitados. + """ diff --git a/src/brapi/types/value_added_entry.py b/src/brapi/types/value_added_entry.py new file mode 100644 index 0000000..70c0db2 --- /dev/null +++ b/src/brapi/types/value_added_entry.py @@ -0,0 +1,240 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import Optional +from datetime import date +from typing_extensions import Literal + +from pydantic import Field as FieldInfo + +from .._models import BaseModel + +__all__ = ["ValueAddedEntry"] + + +class ValueAddedEntry(BaseModel): + added_value_received_by_transfer: Optional[float] = FieldInfo(alias="addedValueReceivedByTransfer", default=None) + """ + Valor Adicionado Recebido em Transferência (Resultado de Equivalência + Patrimonial, Receitas Financeiras, etc.). Item 6 da DVA. + """ + + added_value_received_on_transfer: Optional[float] = FieldInfo(alias="addedValueReceivedOnTransfer", default=None) + """ + Valor Adicionado Recebido em Transferência (sinônimo de + `addedValueReceivedByTransfer`). + """ + + added_value_to_distribute: Optional[float] = FieldInfo(alias="addedValueToDistribute", default=None) + """ + Valor Adicionado Total a Distribuir (Líquido Produzido + Recebido em + Transferência). Item 7 da DVA. + """ + + claims_and_benefits: Optional[float] = FieldInfo(alias="claimsAndBenefits", default=None) + """Sinistros Retidos e Benefícios.""" + + complementary_pension_operations_revenue: Optional[float] = FieldInfo( + alias="complementaryPensionOperationsRevenue", default=None + ) + """Receita com Operações de Previdência Complementar.""" + + construction_of_own_assets: Optional[float] = FieldInfo(alias="constructionOfOwnAssets", default=None) + """Construção de Ativos Próprios.""" + + costs_with_products_sold: Optional[float] = FieldInfo(alias="costsWithProductsSold", default=None) + """Custos dos Produtos, Mercadorias e Serviços Vendidos (detalhamento).""" + + depreciation_and_amortization: Optional[float] = FieldInfo(alias="depreciationAndAmortization", default=None) + """Depreciação e Amortização.""" + + distribution_of_added_value: Optional[float] = FieldInfo(alias="distributionOfAddedValue", default=None) + """Distribuição do Valor Adicionado (Soma dos itens seguintes). 
Item 8 da DVA.""" + + dividends: Optional[float] = None + """Dividendos Distribuídos.""" + + end_date: Optional[date] = FieldInfo(alias="endDate", default=None) + """Data de término do período fiscal ao qual a DVA se refere (YYYY-MM-DD).""" + + equity_income_result: Optional[float] = FieldInfo(alias="equityIncomeResult", default=None) + """Resultado de Equivalência Patrimonial (como receita na DVA).""" + + equity_remuneration: Optional[float] = FieldInfo(alias="equityRemuneration", default=None) + """Remuneração de Capitais Próprios (JCP, Dividendos, Lucros Retidos).""" + + federal_taxes: Optional[float] = FieldInfo(alias="federalTaxes", default=None) + """Impostos Federais (IRPJ, CSLL, PIS, COFINS, IPI).""" + + fees_revenue: Optional[float] = FieldInfo(alias="feesRevenue", default=None) + """Receita com Taxas e Comissões.""" + + financial_income: Optional[float] = FieldInfo(alias="financialIncome", default=None) + """Receitas Financeiras (como valor recebido em transferência).""" + + financial_intermediation_expenses: Optional[float] = FieldInfo( + alias="financialIntermediationExpenses", default=None + ) + """Despesas de Intermediação Financeira (específico para bancos).""" + + financial_intermediation_revenue: Optional[float] = FieldInfo(alias="financialIntermediationRevenue", default=None) + """Receita de Intermediação Financeira (específico para bancos).""" + + gross_added_value: Optional[float] = FieldInfo(alias="grossAddedValue", default=None) + """Valor Adicionado Bruto (Receitas - Insumos). Item 3 da DVA.""" + + insurance_operations_revenue: Optional[float] = FieldInfo(alias="insuranceOperationsRevenue", default=None) + """Receita com Operações de Seguros (específico para Seguradoras).""" + + insurance_operations_variations: Optional[float] = FieldInfo(alias="insuranceOperationsVariations", default=None) + """Variações de Operações de Seguros.""" + + interest_on_own_equity: Optional[float] = FieldInfo(alias="interestOnOwnEquity", default=None) + """Juros sobre o Capital Próprio (JCP).""" + + loss_or_recovery_of_assets: Optional[float] = FieldInfo(alias="lossOrRecoveryOfAssets", default=None) + """Perda/Recuperação de Valores de Ativos (Impairment - como custo/receita).""" + + loss_or_recovery_of_asset_values: Optional[float] = FieldInfo(alias="lossOrRecoveryOfAssetValues", default=None) + """Perda / Recuperação de Valores de Ativos (Impairment).""" + + materials_energy_and_others: Optional[float] = FieldInfo(alias="materialsEnergyAndOthers", default=None) + """Custos com Materiais, Energia, Serviços de Terceiros e Outros.""" + + municipal_taxes: Optional[float] = FieldInfo(alias="municipalTaxes", default=None) + """Impostos Municipais (ISS).""" + + net_added_value: Optional[float] = FieldInfo(alias="netAddedValue", default=None) + """Valor Adicionado Líquido Produzido pela Entidade (Bruto - Retenções). + + Item 5 da DVA. 
+ """ + + net_added_value_produced: Optional[float] = FieldInfo(alias="netAddedValueProduced", default=None) + """Valor Adicionado Líquido Produzido (sinônimo de `netAddedValue`).""" + + net_operating_revenue: Optional[float] = FieldInfo(alias="netOperatingRevenue", default=None) + """Receita Operacional Líquida (detalhamento).""" + + non_controlling_share_of_retained_earnings: Optional[float] = FieldInfo( + alias="nonControllingShareOfRetainedEarnings", default=None + ) + """Participação dos Não Controladores nos Lucros Retidos.""" + + other_distributions: Optional[float] = FieldInfo(alias="otherDistributions", default=None) + """Outras Distribuições.""" + + other_retentions: Optional[float] = FieldInfo(alias="otherRetentions", default=None) + """Outras Retenções (Exaustão, etc.).""" + + other_revenues: Optional[float] = FieldInfo(alias="otherRevenues", default=None) + """Outras Receitas.""" + + other_supplies: Optional[float] = FieldInfo(alias="otherSupplies", default=None) + """Outros Insumos.""" + + other_values_received_by_transfer: Optional[float] = FieldInfo(alias="otherValuesReceivedByTransfer", default=None) + """Outros Valores Recebidos (Receitas Financeiras, Aluguéis, etc.).""" + + other_variations: Optional[float] = FieldInfo(alias="otherVariations", default=None) + """Outras Variações.""" + + own_equity_remuneration: Optional[float] = FieldInfo(alias="ownEquityRemuneration", default=None) + """Remuneração de Capitais Próprios (sinônimo de `equityRemuneration`).""" + + pension_operations_variations: Optional[float] = FieldInfo(alias="pensionOperationsVariations", default=None) + """Variações de Operações de Previdência.""" + + product_sales: Optional[float] = FieldInfo(alias="productSales", default=None) + """Venda de Produtos e Serviços (detalhamento).""" + + provision_or_reversal_of_doubtful_accounts: Optional[float] = FieldInfo( + alias="provisionOrReversalOfDoubtfulAccounts", default=None + ) + """ + Provisão/Reversão para Créditos de Liquidação Duvidosa (PCLD - como + receita/despesa na DVA). + """ + + provision_or_reversal_of_expected_credit_risk_losses: Optional[float] = FieldInfo( + alias="provisionOrReversalOfExpectedCreditRiskLosses", default=None + ) + """Provisão/Reversão de Perdas com Risco de Crédito (PCLD).""" + + remuneration_of_third_party_capitals: Optional[float] = FieldInfo( + alias="remunerationOfThirdPartyCapitals", default=None + ) + """Remuneração de Capitais de Terceiros (Juros, Aluguéis).""" + + result_of_coinsurance_operations_assigned: Optional[float] = FieldInfo( + alias="resultOfCoinsuranceOperationsAssigned", default=None + ) + """Resultado de Operações de Cosseguros Cedidos.""" + + results_of_ceded_reinsurance_operations: Optional[float] = FieldInfo( + alias="resultsOfCededReinsuranceOperations", default=None + ) + """Resultados de Operações de Resseguros Cedidos.""" + + retained_earnings_or_loss: Optional[float] = FieldInfo(alias="retainedEarningsOrLoss", default=None) + """Lucros Retidos ou Prejuízo do Exercício.""" + + retentions: Optional[float] = None + """Retenções (Depreciação, Amortização e Exaustão). Item 4 da DVA.""" + + revenue: Optional[float] = None + """Receitas (Venda de Mercadorias, Produtos e Serviços, etc.). 
Item 1 da DVA.""" + + revenue_from_the_provision_of_services: Optional[float] = FieldInfo( + alias="revenueFromTheProvisionOfServices", default=None + ) + """Receita da Prestação de Serviços (detalhamento).""" + + services: Optional[float] = None + """Serviços de Terceiros (detalhamento).""" + + state_taxes: Optional[float] = FieldInfo(alias="stateTaxes", default=None) + """Impostos Estaduais (ICMS).""" + + supplies_purchased_from_third_parties: Optional[float] = FieldInfo( + alias="suppliesPurchasedFromThirdParties", default=None + ) + """Insumos Adquiridos de Terceiros (Custo de Mercadorias, Matérias-Primas). + + Item 2 da DVA. + """ + + symbol: Optional[str] = None + """Ticker do ativo ao qual a DVA se refere.""" + + taxes: Optional[float] = None + """Impostos, Taxas e Contribuições (Federais, Estaduais, Municipais).""" + + team_remuneration: Optional[float] = FieldInfo(alias="teamRemuneration", default=None) + """Pessoal e Encargos (Salários, Benefícios, FGTS).""" + + third_party_materials_and_services: Optional[float] = FieldInfo( + alias="thirdPartyMaterialsAndServices", default=None + ) + """Materiais, Energia, Serviços de Terceiros.""" + + total_added_value_to_distribute: Optional[float] = FieldInfo(alias="totalAddedValueToDistribute", default=None) + """Valor Adicionado Total a Distribuir (sinônimo de `addedValueToDistribute`).""" + + type: Optional[Literal["yearly", "quarterly"]] = None + """Indica a periodicidade da DVA: `yearly` (anual) ou `quarterly` (trimestral).""" + + updated_at: Optional[date] = FieldInfo(alias="updatedAt", default=None) + """ + Data da última atualização deste registro específico na fonte de dados + (YYYY-MM-DD). + """ + + variation_in_deferred_selling_expenses: Optional[float] = FieldInfo( + alias="variationInDeferredSellingExpenses", default=None + ) + """Variação nas Despesas de Comercialização Diferidas.""" + + variations_of_technical_provisions: Optional[float] = FieldInfo( + alias="variationsOfTechnicalProvisions", default=None + ) + """Variações das Provisões Técnicas (específico para Seguradoras).""" diff --git a/src/client.ts b/src/client.ts deleted file mode 100644 index 2f16b5c..0000000 --- a/src/client.ts +++ /dev/null @@ -1,773 +0,0 @@ -// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
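The DVA item numbers quoted in the ValueAddedEntry docstrings above imply a simple cross-check: Item 7 (total value added to distribute) should equal Item 5 (net value added) plus Item 6 (value received by transfer). A hedged sketch of that recomputation against the model defined above; fields may legitimately be None when a company does not report them:

from typing import Optional

from brapi.types.value_added_entry import ValueAddedEntry

def recomputed_total_to_distribute(entry: ValueAddedEntry) -> Optional[float]:
    # Item 7 = Item 5 + Item 6, per the DVA structure described in the docstrings.
    if entry.net_added_value is None or entry.added_value_received_by_transfer is None:
        return None
    return entry.net_added_value + entry.added_value_received_by_transfer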
- -import type { RequestInit, RequestInfo, BodyInit } from './internal/builtin-types'; -import type { HTTPMethod, PromiseOrValue, MergedRequestInit, FinalizedRequestInit } from './internal/types'; -import { uuid4 } from './internal/utils/uuid'; -import { validatePositiveInteger, isAbsoluteURL, safeJSON } from './internal/utils/values'; -import { sleep } from './internal/utils/sleep'; -export type { Logger, LogLevel } from './internal/utils/log'; -import { castToError, isAbortError } from './internal/errors'; -import type { APIResponseProps } from './internal/parse'; -import { getPlatformHeaders } from './internal/detect-platform'; -import * as Shims from './internal/shims'; -import * as Opts from './internal/request-options'; -import * as qs from './internal/qs'; -import { VERSION } from './version'; -import * as Errors from './core/error'; -import * as Uploads from './core/uploads'; -import * as API from './resources/index'; -import { APIPromise } from './core/api-promise'; -import { Available, AvailableListParams, AvailableListResponse } from './resources/available'; -import { - BalanceSheetEntry, - CashflowEntry, - DefaultKeyStatisticsEntry, - FinancialDataEntry, - IncomeStatementEntry, - Quote, - QuoteListParams, - QuoteListResponse, - QuoteRetrieveParams, - QuoteRetrieveResponse, - ValueAddedEntry, -} from './resources/quote'; -import { V2 } from './resources/v2/v2'; -import { type Fetch } from './internal/builtin-types'; -import { HeadersLike, NullableHeaders, buildHeaders } from './internal/headers'; -import { FinalRequestOptions, RequestOptions } from './internal/request-options'; -import { readEnv } from './internal/utils/env'; -import { - type LogLevel, - type Logger, - formatRequestDetails, - loggerFor, - parseLogLevel, -} from './internal/utils/log'; -import { isEmptyObj } from './internal/utils/values'; - -const environments = { - production: 'https://brapi.dev', - environment_1: 'http://localhost:3000', -}; -type Environment = keyof typeof environments; - -export interface ClientOptions { - /** - * Autenticação via header HTTP `Authorization`. Use o formato `Authorization: Bearer SEU_TOKEN`. [Obtenha seu token](https://brapi.dev/dashboard). - */ - apiKey?: string | undefined; - - /** - * Specifies the environment to use for the API. - * - * Each environment maps to a different base URL: - * - `production` corresponds to `https://brapi.dev` - * - `environment_1` corresponds to `http://localhost:3000` - */ - environment?: Environment | undefined; - - /** - * Override the default base URL for the API, e.g., "https://api.example.com/v2/" - * - * Defaults to process.env['BRAPI_BASE_URL']. - */ - baseURL?: string | null | undefined; - - /** - * The maximum amount of time (in milliseconds) that the client should wait for a response - * from the server before timing out a single request. - * - * Note that request timeouts are retried by default, so in a worst-case scenario you may wait - * much longer than this timeout before the promise succeeds or fails. - * - * @unit milliseconds - */ - timeout?: number | undefined; - /** - * Additional `RequestInit` options to be passed to `fetch` calls. - * Properties will be overridden by per-request `fetchOptions`. - */ - fetchOptions?: MergedRequestInit | undefined; - - /** - * Specify a custom `fetch` function implementation. - * - * If not provided, we expect that `fetch` is defined globally. 
- */ - fetch?: Fetch | undefined; - - /** - * The maximum number of times that the client will retry a request in case of a - * temporary failure, like a network error or a 5XX error from the server. - * - * @default 2 - */ - maxRetries?: number | undefined; - - /** - * Default headers to include with every request to the API. - * - * These can be removed in individual requests by explicitly setting the - * header to `null` in request options. - */ - defaultHeaders?: HeadersLike | undefined; - - /** - * Default query parameters to include with every request to the API. - * - * These can be removed in individual requests by explicitly setting the - * param to `undefined` in request options. - */ - defaultQuery?: Record | undefined; - - /** - * Set the log level. - * - * Defaults to process.env['BRAPI_LOG'] or 'warn' if it isn't set. - */ - logLevel?: LogLevel | undefined; - - /** - * Set the logger. - * - * Defaults to globalThis.console. - */ - logger?: Logger | undefined; -} - -/** - * API Client for interfacing with the Brapi API. - */ -export class Brapi { - apiKey: string; - - baseURL: string; - maxRetries: number; - timeout: number; - logger: Logger | undefined; - logLevel: LogLevel | undefined; - fetchOptions: MergedRequestInit | undefined; - - private fetch: Fetch; - #encoder: Opts.RequestEncoder; - protected idempotencyHeader?: string; - private _options: ClientOptions; - - /** - * API Client for interfacing with the Brapi API. - * - * @param {string | undefined} [opts.apiKey=process.env['BRAPI_API_KEY'] ?? undefined] - * @param {Environment} [opts.environment=production] - Specifies the environment URL to use for the API. - * @param {string} [opts.baseURL=process.env['BRAPI_BASE_URL'] ?? https://brapi.dev] - Override the default base URL for the API. - * @param {number} [opts.timeout=1 minute] - The maximum amount of time (in milliseconds) the client will wait for a response before timing out. - * @param {MergedRequestInit} [opts.fetchOptions] - Additional `RequestInit` options to be passed to `fetch` calls. - * @param {Fetch} [opts.fetch] - Specify a custom `fetch` function implementation. - * @param {number} [opts.maxRetries=2] - The maximum number of times the client will retry a request. - * @param {HeadersLike} opts.defaultHeaders - Default headers to include with every request to the API. - * @param {Record} opts.defaultQuery - Default query parameters to include with every request to the API. - */ - constructor({ - baseURL = readEnv('BRAPI_BASE_URL'), - apiKey = readEnv('BRAPI_API_KEY'), - ...opts - }: ClientOptions = {}) { - if (apiKey === undefined) { - throw new Errors.BrapiError( - "The BRAPI_API_KEY environment variable is missing or empty; either provide it, or instantiate the Brapi client with an apiKey option, like new Brapi({ apiKey: 'My API Key' }).", - ); - } - - const options: ClientOptions = { - apiKey, - ...opts, - baseURL, - environment: opts.environment ?? 'production', - }; - - if (baseURL && opts.environment) { - throw new Errors.BrapiError( - 'Ambiguous URL; The `baseURL` option (or BRAPI_BASE_URL env var) and the `environment` option are given. If you want to use the environment you must pass baseURL: null', - ); - } - - this.baseURL = options.baseURL || environments[options.environment || 'production']; - this.timeout = options.timeout ?? Brapi.DEFAULT_TIMEOUT /* 1 minute */; - this.logger = options.logger ?? console; - const defaultLogLevel = 'warn'; - // Set default logLevel early so that we can log a warning in parseLogLevel. 
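The constructor being removed here resolved the base URL from an explicit `baseURL` (or the `BRAPI_BASE_URL` environment variable), a named `environment`, and the `environments` map, rejecting ambiguous combinations. A rough Python sketch of that precedence, assuming a hypothetical helper name `resolve_base_url` (not part of either SDK):

```python
import os
from typing import Optional

ENVIRONMENTS = {
    "production": "https://brapi.dev",
    "environment_1": "http://localhost:3000",
}


def resolve_base_url(base_url: Optional[str] = None, environment: Optional[str] = None) -> str:
    """Rough mirror of the deleted client's precedence: explicit URL or env var first, else the environment map."""
    base_url = base_url or os.environ.get("BRAPI_BASE_URL")
    if base_url and environment:
        raise ValueError(
            "Ambiguous URL; pass either baseURL (or BRAPI_BASE_URL) or environment, not both"
        )
    return base_url or ENVIRONMENTS[environment or "production"]


print(resolve_base_url())                             # https://brapi.dev
print(resolve_base_url(environment="environment_1"))  # http://localhost:3000
```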
- this.logLevel = defaultLogLevel; - this.logLevel = - parseLogLevel(options.logLevel, 'ClientOptions.logLevel', this) ?? - parseLogLevel(readEnv('BRAPI_LOG'), "process.env['BRAPI_LOG']", this) ?? - defaultLogLevel; - this.fetchOptions = options.fetchOptions; - this.maxRetries = options.maxRetries ?? 2; - this.fetch = options.fetch ?? Shims.getDefaultFetch(); - this.#encoder = Opts.FallbackEncoder; - - this._options = options; - - this.apiKey = apiKey; - } - - /** - * Create a new client instance re-using the same options given to the current client with optional overriding. - */ - withOptions(options: Partial): this { - const client = new (this.constructor as any as new (props: ClientOptions) => typeof this)({ - ...this._options, - environment: options.environment ? options.environment : undefined, - baseURL: options.environment ? undefined : this.baseURL, - maxRetries: this.maxRetries, - timeout: this.timeout, - logger: this.logger, - logLevel: this.logLevel, - fetch: this.fetch, - fetchOptions: this.fetchOptions, - apiKey: this.apiKey, - ...options, - }); - return client; - } - - /** - * Check whether the base URL is set to its default. - */ - #baseURLOverridden(): boolean { - return this.baseURL !== environments[this._options.environment || 'production']; - } - - protected defaultQuery(): Record | undefined { - return this._options.defaultQuery; - } - - protected validateHeaders({ values, nulls }: NullableHeaders) { - return; - } - - protected async authHeaders(opts: FinalRequestOptions): Promise { - return buildHeaders([{ Authorization: `Bearer ${this.apiKey}` }]); - } - - protected stringifyQuery(query: Record): string { - return qs.stringify(query, { arrayFormat: 'comma' }); - } - - private getUserAgent(): string { - return `${this.constructor.name}/JS ${VERSION}`; - } - - protected defaultIdempotencyKey(): string { - return `stainless-node-retry-${uuid4()}`; - } - - protected makeStatusError( - status: number, - error: Object, - message: string | undefined, - headers: Headers, - ): Errors.APIError { - return Errors.APIError.generate(status, error, message, headers); - } - - buildURL( - path: string, - query: Record | null | undefined, - defaultBaseURL?: string | undefined, - ): string { - const baseURL = (!this.#baseURLOverridden() && defaultBaseURL) || this.baseURL; - const url = - isAbsoluteURL(path) ? - new URL(path) - : new URL(baseURL + (baseURL.endsWith('/') && path.startsWith('/') ? path.slice(1) : path)); - - const defaultQuery = this.defaultQuery(); - if (!isEmptyObj(defaultQuery)) { - query = { ...defaultQuery, ...query }; - } - - if (typeof query === 'object' && query && !Array.isArray(query)) { - url.search = this.stringifyQuery(query as Record); - } - - return url.toString(); - } - - /** - * Used as a callback for mutating the given `FinalRequestOptions` object. - */ - protected async prepareOptions(options: FinalRequestOptions): Promise {} - - /** - * Used as a callback for mutating the given `RequestInit` object. - * - * This is useful for cases where you want to add certain headers based off of - * the request properties, e.g. `method` or `url`. 
- */ - protected async prepareRequest( - request: RequestInit, - { url, options }: { url: string; options: FinalRequestOptions }, - ): Promise {} - - get(path: string, opts?: PromiseOrValue): APIPromise { - return this.methodRequest('get', path, opts); - } - - post(path: string, opts?: PromiseOrValue): APIPromise { - return this.methodRequest('post', path, opts); - } - - patch(path: string, opts?: PromiseOrValue): APIPromise { - return this.methodRequest('patch', path, opts); - } - - put(path: string, opts?: PromiseOrValue): APIPromise { - return this.methodRequest('put', path, opts); - } - - delete(path: string, opts?: PromiseOrValue): APIPromise { - return this.methodRequest('delete', path, opts); - } - - private methodRequest( - method: HTTPMethod, - path: string, - opts?: PromiseOrValue, - ): APIPromise { - return this.request( - Promise.resolve(opts).then((opts) => { - return { method, path, ...opts }; - }), - ); - } - - request( - options: PromiseOrValue, - remainingRetries: number | null = null, - ): APIPromise { - return new APIPromise(this, this.makeRequest(options, remainingRetries, undefined)); - } - - private async makeRequest( - optionsInput: PromiseOrValue, - retriesRemaining: number | null, - retryOfRequestLogID: string | undefined, - ): Promise { - const options = await optionsInput; - const maxRetries = options.maxRetries ?? this.maxRetries; - if (retriesRemaining == null) { - retriesRemaining = maxRetries; - } - - await this.prepareOptions(options); - - const { req, url, timeout } = await this.buildRequest(options, { - retryCount: maxRetries - retriesRemaining, - }); - - await this.prepareRequest(req, { url, options }); - - /** Not an API request ID, just for correlating local log entries. */ - const requestLogID = 'log_' + ((Math.random() * (1 << 24)) | 0).toString(16).padStart(6, '0'); - const retryLogStr = retryOfRequestLogID === undefined ? '' : `, retryOf: ${retryOfRequestLogID}`; - const startTime = Date.now(); - - loggerFor(this).debug( - `[${requestLogID}] sending request`, - formatRequestDetails({ - retryOfRequestLogID, - method: options.method, - url, - options, - headers: req.headers, - }), - ); - - if (options.signal?.aborted) { - throw new Errors.APIUserAbortError(); - } - - const controller = new AbortController(); - const response = await this.fetchWithTimeout(url, req, timeout, controller).catch(castToError); - const headersTime = Date.now(); - - if (response instanceof globalThis.Error) { - const retryMessage = `retrying, ${retriesRemaining} attempts remaining`; - if (options.signal?.aborted) { - throw new Errors.APIUserAbortError(); - } - // detect native connection timeout errors - // deno throws "TypeError: error sending request for url (https://example/): client error (Connect): tcp connect error: Operation timed out (os error 60): Operation timed out (os error 60)" - // undici throws "TypeError: fetch failed" with cause "ConnectTimeoutError: Connect Timeout Error (attempted address: example:443, timeout: 1ms)" - // others do not provide enough information to distinguish timeouts from other connection errors - const isTimeout = - isAbortError(response) || - /timed? ?out/i.test(String(response) + ('cause' in response ? String(response.cause) : '')); - if (retriesRemaining) { - loggerFor(this).info( - `[${requestLogID}] connection ${isTimeout ? 'timed out' : 'failed'} - ${retryMessage}`, - ); - loggerFor(this).debug( - `[${requestLogID}] connection ${isTimeout ? 
'timed out' : 'failed'} (${retryMessage})`, - formatRequestDetails({ - retryOfRequestLogID, - url, - durationMs: headersTime - startTime, - message: response.message, - }), - ); - return this.retryRequest(options, retriesRemaining, retryOfRequestLogID ?? requestLogID); - } - loggerFor(this).info( - `[${requestLogID}] connection ${isTimeout ? 'timed out' : 'failed'} - error; no more retries left`, - ); - loggerFor(this).debug( - `[${requestLogID}] connection ${isTimeout ? 'timed out' : 'failed'} (error; no more retries left)`, - formatRequestDetails({ - retryOfRequestLogID, - url, - durationMs: headersTime - startTime, - message: response.message, - }), - ); - if (isTimeout) { - throw new Errors.APIConnectionTimeoutError(); - } - throw new Errors.APIConnectionError({ cause: response }); - } - - const responseInfo = `[${requestLogID}${retryLogStr}] ${req.method} ${url} ${ - response.ok ? 'succeeded' : 'failed' - } with status ${response.status} in ${headersTime - startTime}ms`; - - if (!response.ok) { - const shouldRetry = await this.shouldRetry(response); - if (retriesRemaining && shouldRetry) { - const retryMessage = `retrying, ${retriesRemaining} attempts remaining`; - - // We don't need the body of this response. - await Shims.CancelReadableStream(response.body); - loggerFor(this).info(`${responseInfo} - ${retryMessage}`); - loggerFor(this).debug( - `[${requestLogID}] response error (${retryMessage})`, - formatRequestDetails({ - retryOfRequestLogID, - url: response.url, - status: response.status, - headers: response.headers, - durationMs: headersTime - startTime, - }), - ); - return this.retryRequest( - options, - retriesRemaining, - retryOfRequestLogID ?? requestLogID, - response.headers, - ); - } - - const retryMessage = shouldRetry ? `error; no more retries left` : `error; not retryable`; - - loggerFor(this).info(`${responseInfo} - ${retryMessage}`); - - const errText = await response.text().catch((err: any) => castToError(err).message); - const errJSON = safeJSON(errText); - const errMessage = errJSON ? undefined : errText; - - loggerFor(this).debug( - `[${requestLogID}] response error (${retryMessage})`, - formatRequestDetails({ - retryOfRequestLogID, - url: response.url, - status: response.status, - headers: response.headers, - message: errMessage, - durationMs: Date.now() - startTime, - }), - ); - - const err = this.makeStatusError(response.status, errJSON, errMessage, response.headers); - throw err; - } - - loggerFor(this).info(responseInfo); - loggerFor(this).debug( - `[${requestLogID}] response start`, - formatRequestDetails({ - retryOfRequestLogID, - url: response.url, - status: response.status, - headers: response.headers, - durationMs: headersTime - startTime, - }), - ); - - return { response, options, controller, requestLogID, retryOfRequestLogID, startTime }; - } - - async fetchWithTimeout( - url: RequestInfo, - init: RequestInit | undefined, - ms: number, - controller: AbortController, - ): Promise { - const { signal, method, ...options } = init || {}; - if (signal) signal.addEventListener('abort', () => controller.abort()); - - const timeout = setTimeout(() => controller.abort(), ms); - - const isReadableBody = - ((globalThis as any).ReadableStream && options.body instanceof (globalThis as any).ReadableStream) || - (typeof options.body === 'object' && options.body !== null && Symbol.asyncIterator in options.body); - - const fetchOptions: RequestInit = { - signal: controller.signal as any, - ...(isReadableBody ? 
{ duplex: 'half' } : {}), - method: 'GET', - ...options, - }; - if (method) { - // Custom methods like 'patch' need to be uppercased - // See https://github.com/nodejs/undici/issues/2294 - fetchOptions.method = method.toUpperCase(); - } - - try { - // use undefined this binding; fetch errors if bound to something else in browser/cloudflare - return await this.fetch.call(undefined, url, fetchOptions); - } finally { - clearTimeout(timeout); - } - } - - private async shouldRetry(response: Response): Promise { - // Note this is not a standard header. - const shouldRetryHeader = response.headers.get('x-should-retry'); - - // If the server explicitly says whether or not to retry, obey. - if (shouldRetryHeader === 'true') return true; - if (shouldRetryHeader === 'false') return false; - - // Retry on request timeouts. - if (response.status === 408) return true; - - // Retry on lock timeouts. - if (response.status === 409) return true; - - // Retry on rate limits. - if (response.status === 429) return true; - - // Retry internal errors. - if (response.status >= 500) return true; - - return false; - } - - private async retryRequest( - options: FinalRequestOptions, - retriesRemaining: number, - requestLogID: string, - responseHeaders?: Headers | undefined, - ): Promise { - let timeoutMillis: number | undefined; - - // Note the `retry-after-ms` header may not be standard, but is a good idea and we'd like proactive support for it. - const retryAfterMillisHeader = responseHeaders?.get('retry-after-ms'); - if (retryAfterMillisHeader) { - const timeoutMs = parseFloat(retryAfterMillisHeader); - if (!Number.isNaN(timeoutMs)) { - timeoutMillis = timeoutMs; - } - } - - // About the Retry-After header: https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Retry-After - const retryAfterHeader = responseHeaders?.get('retry-after'); - if (retryAfterHeader && !timeoutMillis) { - const timeoutSeconds = parseFloat(retryAfterHeader); - if (!Number.isNaN(timeoutSeconds)) { - timeoutMillis = timeoutSeconds * 1000; - } else { - timeoutMillis = Date.parse(retryAfterHeader) - Date.now(); - } - } - - // If the API asks us to wait a certain amount of time (and it's a reasonable amount), - // just do what it says, but otherwise calculate a default - if (!(timeoutMillis && 0 <= timeoutMillis && timeoutMillis < 60 * 1000)) { - const maxRetries = options.maxRetries ?? this.maxRetries; - timeoutMillis = this.calculateDefaultRetryTimeoutMillis(retriesRemaining, maxRetries); - } - await sleep(timeoutMillis); - - return this.makeRequest(options, retriesRemaining - 1, requestLogID); - } - - private calculateDefaultRetryTimeoutMillis(retriesRemaining: number, maxRetries: number): number { - const initialRetryDelay = 0.5; - const maxRetryDelay = 8.0; - - const numRetries = maxRetries - retriesRemaining; - - // Apply exponential backoff, but not more than the max. - const sleepSeconds = Math.min(initialRetryDelay * Math.pow(2, numRetries), maxRetryDelay); - - // Apply some jitter, take up to at most 25 percent of the retry time. 
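The retry path deleted here honours a server-supplied `retry-after-ms` or `Retry-After` hint when it is under a minute and otherwise falls back to exponential backoff starting at 0.5 s, capped at 8 s, with up to 25% jitter. A standalone sketch of that delay calculation (illustrative only; `retry_delay_ms` is a made-up name, not the SDK's API):

```python
import random
from typing import Mapping, Optional


def retry_delay_ms(retries_remaining: int, max_retries: int,
                   headers: Optional[Mapping[str, str]] = None) -> float:
    """Delay before the next retry, mirroring the logic shown in the deleted client.ts."""
    headers = headers or {}

    # 1) Server hint: retry-after-ms (milliseconds), else Retry-After (seconds).
    hint_ms: Optional[float] = None
    if "retry-after-ms" in headers:
        try:
            hint_ms = float(headers["retry-after-ms"])
        except ValueError:
            pass
    elif "retry-after" in headers:
        try:
            hint_ms = float(headers["retry-after"]) * 1000
        except ValueError:
            pass  # an HTTP-date value would be handled here in the real client

    if hint_ms is not None and 0 <= hint_ms < 60_000:
        return hint_ms

    # 2) Default: exponential backoff (0.5 s, 1 s, 2 s, ... capped at 8 s) with up to 25% jitter.
    num_retries = max_retries - retries_remaining
    sleep_seconds = min(0.5 * 2 ** num_retries, 8.0)
    jitter = 1 - random.random() * 0.25
    return sleep_seconds * jitter * 1000


print(retry_delay_ms(retries_remaining=2, max_retries=2))  # ~375-500 ms on the first retry
```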
- const jitter = 1 - Math.random() * 0.25; - - return sleepSeconds * jitter * 1000; - } - - async buildRequest( - inputOptions: FinalRequestOptions, - { retryCount = 0 }: { retryCount?: number } = {}, - ): Promise<{ req: FinalizedRequestInit; url: string; timeout: number }> { - const options = { ...inputOptions }; - const { method, path, query, defaultBaseURL } = options; - - const url = this.buildURL(path!, query as Record, defaultBaseURL); - if ('timeout' in options) validatePositiveInteger('timeout', options.timeout); - options.timeout = options.timeout ?? this.timeout; - const { bodyHeaders, body } = this.buildBody({ options }); - const reqHeaders = await this.buildHeaders({ options: inputOptions, method, bodyHeaders, retryCount }); - - const req: FinalizedRequestInit = { - method, - headers: reqHeaders, - ...(options.signal && { signal: options.signal }), - ...((globalThis as any).ReadableStream && - body instanceof (globalThis as any).ReadableStream && { duplex: 'half' }), - ...(body && { body }), - ...((this.fetchOptions as any) ?? {}), - ...((options.fetchOptions as any) ?? {}), - }; - - return { req, url, timeout: options.timeout }; - } - - private async buildHeaders({ - options, - method, - bodyHeaders, - retryCount, - }: { - options: FinalRequestOptions; - method: HTTPMethod; - bodyHeaders: HeadersLike; - retryCount: number; - }): Promise { - let idempotencyHeaders: HeadersLike = {}; - if (this.idempotencyHeader && method !== 'get') { - if (!options.idempotencyKey) options.idempotencyKey = this.defaultIdempotencyKey(); - idempotencyHeaders[this.idempotencyHeader] = options.idempotencyKey; - } - - const headers = buildHeaders([ - idempotencyHeaders, - { - Accept: 'application/json', - 'User-Agent': this.getUserAgent(), - 'X-Stainless-Retry-Count': String(retryCount), - ...(options.timeout ? 
{ 'X-Stainless-Timeout': String(Math.trunc(options.timeout / 1000)) } : {}), - ...getPlatformHeaders(), - }, - await this.authHeaders(options), - this._options.defaultHeaders, - bodyHeaders, - options.headers, - ]); - - this.validateHeaders(headers); - - return headers.values; - } - - private buildBody({ options: { body, headers: rawHeaders } }: { options: FinalRequestOptions }): { - bodyHeaders: HeadersLike; - body: BodyInit | undefined; - } { - if (!body) { - return { bodyHeaders: undefined, body: undefined }; - } - const headers = buildHeaders([rawHeaders]); - if ( - // Pass raw type verbatim - ArrayBuffer.isView(body) || - body instanceof ArrayBuffer || - body instanceof DataView || - (typeof body === 'string' && - // Preserve legacy string encoding behavior for now - headers.values.has('content-type')) || - // `Blob` is superset of `File` - ((globalThis as any).Blob && body instanceof (globalThis as any).Blob) || - // `FormData` -> `multipart/form-data` - body instanceof FormData || - // `URLSearchParams` -> `application/x-www-form-urlencoded` - body instanceof URLSearchParams || - // Send chunked stream (each chunk has own `length`) - ((globalThis as any).ReadableStream && body instanceof (globalThis as any).ReadableStream) - ) { - return { bodyHeaders: undefined, body: body as BodyInit }; - } else if ( - typeof body === 'object' && - (Symbol.asyncIterator in body || - (Symbol.iterator in body && 'next' in body && typeof body.next === 'function')) - ) { - return { bodyHeaders: undefined, body: Shims.ReadableStreamFrom(body as AsyncIterable) }; - } else { - return this.#encoder({ body, headers }); - } - } - - static Brapi = this; - static DEFAULT_TIMEOUT = 60000; // 1 minute - - static BrapiError = Errors.BrapiError; - static APIError = Errors.APIError; - static APIConnectionError = Errors.APIConnectionError; - static APIConnectionTimeoutError = Errors.APIConnectionTimeoutError; - static APIUserAbortError = Errors.APIUserAbortError; - static NotFoundError = Errors.NotFoundError; - static ConflictError = Errors.ConflictError; - static RateLimitError = Errors.RateLimitError; - static BadRequestError = Errors.BadRequestError; - static AuthenticationError = Errors.AuthenticationError; - static InternalServerError = Errors.InternalServerError; - static PermissionDeniedError = Errors.PermissionDeniedError; - static UnprocessableEntityError = Errors.UnprocessableEntityError; - - static toFile = Uploads.toFile; - - quote: API.Quote = new API.Quote(this); - available: API.Available = new API.Available(this); - v2: API.V2 = new API.V2(this); -} - -Brapi.Quote = Quote; -Brapi.Available = Available; -Brapi.V2 = V2; - -export declare namespace Brapi { - export type RequestOptions = Opts.RequestOptions; - - export { - Quote as Quote, - type BalanceSheetEntry as BalanceSheetEntry, - type CashflowEntry as CashflowEntry, - type DefaultKeyStatisticsEntry as DefaultKeyStatisticsEntry, - type FinancialDataEntry as FinancialDataEntry, - type IncomeStatementEntry as IncomeStatementEntry, - type ValueAddedEntry as ValueAddedEntry, - type QuoteRetrieveResponse as QuoteRetrieveResponse, - type QuoteListResponse as QuoteListResponse, - type QuoteRetrieveParams as QuoteRetrieveParams, - type QuoteListParams as QuoteListParams, - }; - - export { - Available as Available, - type AvailableListResponse as AvailableListResponse, - type AvailableListParams as AvailableListParams, - }; - - export { V2 as V2 }; -} diff --git a/src/core/README.md b/src/core/README.md deleted file mode 100644 index 485fce8..0000000 --- 
a/src/core/README.md +++ /dev/null @@ -1,3 +0,0 @@ -# `core` - -This directory holds public modules implementing non-resource-specific SDK functionality. diff --git a/src/core/api-promise.ts b/src/core/api-promise.ts deleted file mode 100644 index 388c06f..0000000 --- a/src/core/api-promise.ts +++ /dev/null @@ -1,92 +0,0 @@ -// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -import { type Brapi } from '../client'; - -import { type PromiseOrValue } from '../internal/types'; -import { APIResponseProps, defaultParseResponse } from '../internal/parse'; - -/** - * A subclass of `Promise` providing additional helper methods - * for interacting with the SDK. - */ -export class APIPromise extends Promise { - private parsedPromise: Promise | undefined; - #client: Brapi; - - constructor( - client: Brapi, - private responsePromise: Promise, - private parseResponse: ( - client: Brapi, - props: APIResponseProps, - ) => PromiseOrValue = defaultParseResponse, - ) { - super((resolve) => { - // this is maybe a bit weird but this has to be a no-op to not implicitly - // parse the response body; instead .then, .catch, .finally are overridden - // to parse the response - resolve(null as any); - }); - this.#client = client; - } - - _thenUnwrap(transform: (data: T, props: APIResponseProps) => U): APIPromise { - return new APIPromise(this.#client, this.responsePromise, async (client, props) => - transform(await this.parseResponse(client, props), props), - ); - } - - /** - * Gets the raw `Response` instance instead of parsing the response - * data. - * - * If you want to parse the response body but still get the `Response` - * instance, you can use {@link withResponse()}. - * - * 👋 Getting the wrong TypeScript type for `Response`? - * Try setting `"moduleResolution": "NodeNext"` or add `"lib": ["DOM"]` - * to your `tsconfig.json`. - */ - asResponse(): Promise { - return this.responsePromise.then((p) => p.response); - } - - /** - * Gets the parsed response data and the raw `Response` instance. - * - * If you just want to get the raw `Response` instance without parsing it, - * you can use {@link asResponse()}. - * - * 👋 Getting the wrong TypeScript type for `Response`? - * Try setting `"moduleResolution": "NodeNext"` or add `"lib": ["DOM"]` - * to your `tsconfig.json`. - */ - async withResponse(): Promise<{ data: T; response: Response }> { - const [data, response] = await Promise.all([this.parse(), this.asResponse()]); - return { data, response }; - } - - private parse(): Promise { - if (!this.parsedPromise) { - this.parsedPromise = this.responsePromise.then((data) => this.parseResponse(this.#client, data)); - } - return this.parsedPromise; - } - - override then( - onfulfilled?: ((value: T) => TResult1 | PromiseLike) | undefined | null, - onrejected?: ((reason: any) => TResult2 | PromiseLike) | undefined | null, - ): Promise { - return this.parse().then(onfulfilled, onrejected); - } - - override catch( - onrejected?: ((reason: any) => TResult | PromiseLike) | undefined | null, - ): Promise { - return this.parse().catch(onrejected); - } - - override finally(onfinally?: (() => void) | undefined | null): Promise { - return this.parse().finally(onfinally); - } -} diff --git a/src/core/error.ts b/src/core/error.ts deleted file mode 100644 index 6dd9dc4..0000000 --- a/src/core/error.ts +++ /dev/null @@ -1,130 +0,0 @@ -// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
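The `APIPromise` removed above defers parsing until the promise is consumed via `then`/`catch`/`finally`, while `asResponse()` exposes the raw response. A rough Python analogue of that lazy-parse idea (the `LazyResponse` class is hypothetical and not part of the Python SDK):

```python
import asyncio
from typing import Any, Awaitable, Callable


class LazyResponse:
    """Parse the payload only when the result is awaited; keep the raw form reachable."""

    def __init__(self, fetch_raw: Callable[[], Awaitable[dict]], parse: Callable[[dict], Any]):
        self._fetch_raw = fetch_raw
        self._parse = parse
        self._raw: Any = None
        self._have_raw = False

    async def raw(self) -> dict:
        # Counterpart of asResponse(): hand back the unparsed payload, fetching it once.
        if not self._have_raw:
            self._raw = await self._fetch_raw()
            self._have_raw = True
        return self._raw

    def __await__(self):
        # Awaiting the object triggers the parse, mirroring then/catch/finally on APIPromise.
        async def _parsed() -> Any:
            return self._parse(await self.raw())
        return _parsed().__await__()


async def main() -> None:
    resp = LazyResponse(lambda: asyncio.sleep(0, {"price": 10.5}),
                        lambda body: body["price"])
    print(await resp)        # 10.5 (parsed)
    print(await resp.raw())  # {'price': 10.5}

asyncio.run(main())
```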
- -import { castToError } from '../internal/errors'; - -export class BrapiError extends Error {} - -export class APIError< - TStatus extends number | undefined = number | undefined, - THeaders extends Headers | undefined = Headers | undefined, - TError extends Object | undefined = Object | undefined, -> extends BrapiError { - /** HTTP status for the response that caused the error */ - readonly status: TStatus; - /** HTTP headers for the response that caused the error */ - readonly headers: THeaders; - /** JSON body of the response that caused the error */ - readonly error: TError; - - constructor(status: TStatus, error: TError, message: string | undefined, headers: THeaders) { - super(`${APIError.makeMessage(status, error, message)}`); - this.status = status; - this.headers = headers; - this.error = error; - } - - private static makeMessage(status: number | undefined, error: any, message: string | undefined) { - const msg = - error?.message ? - typeof error.message === 'string' ? - error.message - : JSON.stringify(error.message) - : error ? JSON.stringify(error) - : message; - - if (status && msg) { - return `${status} ${msg}`; - } - if (status) { - return `${status} status code (no body)`; - } - if (msg) { - return msg; - } - return '(no status code or body)'; - } - - static generate( - status: number | undefined, - errorResponse: Object | undefined, - message: string | undefined, - headers: Headers | undefined, - ): APIError { - if (!status || !headers) { - return new APIConnectionError({ message, cause: castToError(errorResponse) }); - } - - const error = errorResponse as Record; - - if (status === 400) { - return new BadRequestError(status, error, message, headers); - } - - if (status === 401) { - return new AuthenticationError(status, error, message, headers); - } - - if (status === 403) { - return new PermissionDeniedError(status, error, message, headers); - } - - if (status === 404) { - return new NotFoundError(status, error, message, headers); - } - - if (status === 409) { - return new ConflictError(status, error, message, headers); - } - - if (status === 422) { - return new UnprocessableEntityError(status, error, message, headers); - } - - if (status === 429) { - return new RateLimitError(status, error, message, headers); - } - - if (status >= 500) { - return new InternalServerError(status, error, message, headers); - } - - return new APIError(status, error, message, headers); - } -} - -export class APIUserAbortError extends APIError { - constructor({ message }: { message?: string } = {}) { - super(undefined, undefined, message || 'Request was aborted.', undefined); - } -} - -export class APIConnectionError extends APIError { - constructor({ message, cause }: { message?: string | undefined; cause?: Error | undefined }) { - super(undefined, undefined, message || 'Connection error.', undefined); - // in some environments the 'cause' property is already declared - // @ts-ignore - if (cause) this.cause = cause; - } -} - -export class APIConnectionTimeoutError extends APIConnectionError { - constructor({ message }: { message?: string } = {}) { - super({ message: message ?? 'Request timed out.' 
}); - } -} - -export class BadRequestError extends APIError<400, Headers> {} - -export class AuthenticationError extends APIError<401, Headers> {} - -export class PermissionDeniedError extends APIError<403, Headers> {} - -export class NotFoundError extends APIError<404, Headers> {} - -export class ConflictError extends APIError<409, Headers> {} - -export class UnprocessableEntityError extends APIError<422, Headers> {} - -export class RateLimitError extends APIError<429, Headers> {} - -export class InternalServerError extends APIError {} diff --git a/src/core/resource.ts b/src/core/resource.ts deleted file mode 100644 index 01df067..0000000 --- a/src/core/resource.ts +++ /dev/null @@ -1,11 +0,0 @@ -// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -import type { Brapi } from '../client'; - -export abstract class APIResource { - protected _client: Brapi; - - constructor(client: Brapi) { - this._client = client; - } -} diff --git a/src/core/uploads.ts b/src/core/uploads.ts deleted file mode 100644 index 2882ca6..0000000 --- a/src/core/uploads.ts +++ /dev/null @@ -1,2 +0,0 @@ -export { type Uploadable } from '../internal/uploads'; -export { toFile, type ToFileInput } from '../internal/to-file'; diff --git a/src/error.ts b/src/error.ts deleted file mode 100644 index fc55f46..0000000 --- a/src/error.ts +++ /dev/null @@ -1,2 +0,0 @@ -/** @deprecated Import from ./core/error instead */ -export * from './core/error'; diff --git a/src/index.ts b/src/index.ts deleted file mode 100644 index 4a032b9..0000000 --- a/src/index.ts +++ /dev/null @@ -1,22 +0,0 @@ -// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -export { Brapi as default } from './client'; - -export { type Uploadable, toFile } from './core/uploads'; -export { APIPromise } from './core/api-promise'; -export { Brapi, type ClientOptions } from './client'; -export { - BrapiError, - APIError, - APIConnectionError, - APIConnectionTimeoutError, - APIUserAbortError, - NotFoundError, - ConflictError, - RateLimitError, - BadRequestError, - AuthenticationError, - InternalServerError, - PermissionDeniedError, - UnprocessableEntityError, -} from './core/error'; diff --git a/src/internal/README.md b/src/internal/README.md deleted file mode 100644 index 3ef5a25..0000000 --- a/src/internal/README.md +++ /dev/null @@ -1,3 +0,0 @@ -# `internal` - -The modules in this directory are not importable outside this package and will change between releases. diff --git a/src/internal/builtin-types.ts b/src/internal/builtin-types.ts deleted file mode 100644 index c23d3bd..0000000 --- a/src/internal/builtin-types.ts +++ /dev/null @@ -1,93 +0,0 @@ -// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -export type Fetch = (input: string | URL | Request, init?: RequestInit) => Promise; - -/** - * An alias to the builtin `RequestInit` type so we can - * easily alias it in import statements if there are name clashes. - * - * https://developer.mozilla.org/docs/Web/API/RequestInit - */ -type _RequestInit = RequestInit; - -/** - * An alias to the builtin `Response` type so we can - * easily alias it in import statements if there are name clashes. - * - * https://developer.mozilla.org/docs/Web/API/Response - */ -type _Response = Response; - -/** - * The type for the first argument to `fetch`. 
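The deleted `core/error.ts` maps HTTP status codes onto a small exception hierarchy (400 → BadRequestError, 401 → AuthenticationError, 403 → PermissionDeniedError, 404 → NotFoundError, 409 → ConflictError, 422 → UnprocessableEntityError, 429 → RateLimitError, ≥500 → InternalServerError). A compact Python sketch of the same dispatch (illustrative; the Python SDK's actual exception classes may be named or organised differently):

```python
class APIError(Exception):
    def __init__(self, status: int, message: str = ""):
        super().__init__(f"{status} {message}".strip())
        self.status = status


class BadRequestError(APIError): ...
class AuthenticationError(APIError): ...
class PermissionDeniedError(APIError): ...
class NotFoundError(APIError): ...
class ConflictError(APIError): ...
class UnprocessableEntityError(APIError): ...
class RateLimitError(APIError): ...
class InternalServerError(APIError): ...

_STATUS_MAP = {
    400: BadRequestError,
    401: AuthenticationError,
    403: PermissionDeniedError,
    404: NotFoundError,
    409: ConflictError,
    422: UnprocessableEntityError,
    429: RateLimitError,
}


def error_for_status(status: int, message: str = "") -> APIError:
    """Pick the exception class the way APIError.generate does."""
    if status >= 500:
        return InternalServerError(status, message)
    return _STATUS_MAP.get(status, APIError)(status, message)


print(type(error_for_status(429, "rate limited")).__name__)  # RateLimitError
```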
- * - * https://developer.mozilla.org/docs/Web/API/Window/fetch#resource - */ -type _RequestInfo = Request | URL | string; - -/** - * The type for constructing `RequestInit` Headers. - * - * https://developer.mozilla.org/docs/Web/API/RequestInit#setting_headers - */ -type _HeadersInit = RequestInit['headers']; - -/** - * The type for constructing `RequestInit` body. - * - * https://developer.mozilla.org/docs/Web/API/RequestInit#body - */ -type _BodyInit = RequestInit['body']; - -/** - * An alias to the builtin `Array` type so we can - * easily alias it in import statements if there are name clashes. - */ -type _Array = Array; - -/** - * An alias to the builtin `Record` type so we can - * easily alias it in import statements if there are name clashes. - */ -type _Record = Record; - -export type { - _Array as Array, - _BodyInit as BodyInit, - _HeadersInit as HeadersInit, - _Record as Record, - _RequestInfo as RequestInfo, - _RequestInit as RequestInit, - _Response as Response, -}; - -/** - * A copy of the builtin `EndingType` type as it isn't fully supported in certain - * environments and attempting to reference the global version will error. - * - * https://github.com/microsoft/TypeScript/blob/49ad1a3917a0ea57f5ff248159256e12bb1cb705/src/lib/dom.generated.d.ts#L27941 - */ -type EndingType = 'native' | 'transparent'; - -/** - * A copy of the builtin `BlobPropertyBag` type as it isn't fully supported in certain - * environments and attempting to reference the global version will error. - * - * https://github.com/microsoft/TypeScript/blob/49ad1a3917a0ea57f5ff248159256e12bb1cb705/src/lib/dom.generated.d.ts#L154 - * https://developer.mozilla.org/en-US/docs/Web/API/Blob/Blob#options - */ -export interface BlobPropertyBag { - endings?: EndingType; - type?: string; -} - -/** - * A copy of the builtin `FilePropertyBag` type as it isn't fully supported in certain - * environments and attempting to reference the global version will error. - * - * https://github.com/microsoft/TypeScript/blob/49ad1a3917a0ea57f5ff248159256e12bb1cb705/src/lib/dom.generated.d.ts#L503 - * https://developer.mozilla.org/en-US/docs/Web/API/File/File#options - */ -export interface FilePropertyBag extends BlobPropertyBag { - lastModified?: number; -} diff --git a/src/internal/detect-platform.ts b/src/internal/detect-platform.ts deleted file mode 100644 index e82d95c..0000000 --- a/src/internal/detect-platform.ts +++ /dev/null @@ -1,196 +0,0 @@ -// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -import { VERSION } from '../version'; - -export const isRunningInBrowser = () => { - return ( - // @ts-ignore - typeof window !== 'undefined' && - // @ts-ignore - typeof window.document !== 'undefined' && - // @ts-ignore - typeof navigator !== 'undefined' - ); -}; - -type DetectedPlatform = 'deno' | 'node' | 'edge' | 'unknown'; - -/** - * Note this does not detect 'browser'; for that, use getBrowserInfo(). - */ -function getDetectedPlatform(): DetectedPlatform { - if (typeof Deno !== 'undefined' && Deno.build != null) { - return 'deno'; - } - if (typeof EdgeRuntime !== 'undefined') { - return 'edge'; - } - if ( - Object.prototype.toString.call( - typeof (globalThis as any).process !== 'undefined' ? 
(globalThis as any).process : 0, - ) === '[object process]' - ) { - return 'node'; - } - return 'unknown'; -} - -declare const Deno: any; -declare const EdgeRuntime: any; -type Arch = 'x32' | 'x64' | 'arm' | 'arm64' | `other:${string}` | 'unknown'; -type PlatformName = - | 'MacOS' - | 'Linux' - | 'Windows' - | 'FreeBSD' - | 'OpenBSD' - | 'iOS' - | 'Android' - | `Other:${string}` - | 'Unknown'; -type Browser = 'ie' | 'edge' | 'chrome' | 'firefox' | 'safari'; -type PlatformProperties = { - 'X-Stainless-Lang': 'js'; - 'X-Stainless-Package-Version': string; - 'X-Stainless-OS': PlatformName; - 'X-Stainless-Arch': Arch; - 'X-Stainless-Runtime': 'node' | 'deno' | 'edge' | `browser:${Browser}` | 'unknown'; - 'X-Stainless-Runtime-Version': string; -}; -const getPlatformProperties = (): PlatformProperties => { - const detectedPlatform = getDetectedPlatform(); - if (detectedPlatform === 'deno') { - return { - 'X-Stainless-Lang': 'js', - 'X-Stainless-Package-Version': VERSION, - 'X-Stainless-OS': normalizePlatform(Deno.build.os), - 'X-Stainless-Arch': normalizeArch(Deno.build.arch), - 'X-Stainless-Runtime': 'deno', - 'X-Stainless-Runtime-Version': - typeof Deno.version === 'string' ? Deno.version : Deno.version?.deno ?? 'unknown', - }; - } - if (typeof EdgeRuntime !== 'undefined') { - return { - 'X-Stainless-Lang': 'js', - 'X-Stainless-Package-Version': VERSION, - 'X-Stainless-OS': 'Unknown', - 'X-Stainless-Arch': `other:${EdgeRuntime}`, - 'X-Stainless-Runtime': 'edge', - 'X-Stainless-Runtime-Version': (globalThis as any).process.version, - }; - } - // Check if Node.js - if (detectedPlatform === 'node') { - return { - 'X-Stainless-Lang': 'js', - 'X-Stainless-Package-Version': VERSION, - 'X-Stainless-OS': normalizePlatform((globalThis as any).process.platform ?? 'unknown'), - 'X-Stainless-Arch': normalizeArch((globalThis as any).process.arch ?? 'unknown'), - 'X-Stainless-Runtime': 'node', - 'X-Stainless-Runtime-Version': (globalThis as any).process.version ?? 'unknown', - }; - } - - const browserInfo = getBrowserInfo(); - if (browserInfo) { - return { - 'X-Stainless-Lang': 'js', - 'X-Stainless-Package-Version': VERSION, - 'X-Stainless-OS': 'Unknown', - 'X-Stainless-Arch': 'unknown', - 'X-Stainless-Runtime': `browser:${browserInfo.browser}`, - 'X-Stainless-Runtime-Version': browserInfo.version, - }; - } - - // TODO add support for Cloudflare workers, etc. - return { - 'X-Stainless-Lang': 'js', - 'X-Stainless-Package-Version': VERSION, - 'X-Stainless-OS': 'Unknown', - 'X-Stainless-Arch': 'unknown', - 'X-Stainless-Runtime': 'unknown', - 'X-Stainless-Runtime-Version': 'unknown', - }; -}; - -type BrowserInfo = { - browser: Browser; - version: string; -}; - -declare const navigator: { userAgent: string } | undefined; - -// Note: modified from https://github.com/JS-DevTools/host-environment/blob/b1ab79ecde37db5d6e163c050e54fe7d287d7c92/src/isomorphic.browser.ts -function getBrowserInfo(): BrowserInfo | null { - if (typeof navigator === 'undefined' || !navigator) { - return null; - } - - // NOTE: The order matters here! 
- const browserPatterns = [ - { key: 'edge' as const, pattern: /Edge(?:\W+(\d+)\.(\d+)(?:\.(\d+))?)?/ }, - { key: 'ie' as const, pattern: /MSIE(?:\W+(\d+)\.(\d+)(?:\.(\d+))?)?/ }, - { key: 'ie' as const, pattern: /Trident(?:.*rv\:(\d+)\.(\d+)(?:\.(\d+))?)?/ }, - { key: 'chrome' as const, pattern: /Chrome(?:\W+(\d+)\.(\d+)(?:\.(\d+))?)?/ }, - { key: 'firefox' as const, pattern: /Firefox(?:\W+(\d+)\.(\d+)(?:\.(\d+))?)?/ }, - { key: 'safari' as const, pattern: /(?:Version\W+(\d+)\.(\d+)(?:\.(\d+))?)?(?:\W+Mobile\S*)?\W+Safari/ }, - ]; - - // Find the FIRST matching browser - for (const { key, pattern } of browserPatterns) { - const match = pattern.exec(navigator.userAgent); - if (match) { - const major = match[1] || 0; - const minor = match[2] || 0; - const patch = match[3] || 0; - - return { browser: key, version: `${major}.${minor}.${patch}` }; - } - } - - return null; -} - -const normalizeArch = (arch: string): Arch => { - // Node docs: - // - https://nodejs.org/api/process.html#processarch - // Deno docs: - // - https://doc.deno.land/deno/stable/~/Deno.build - if (arch === 'x32') return 'x32'; - if (arch === 'x86_64' || arch === 'x64') return 'x64'; - if (arch === 'arm') return 'arm'; - if (arch === 'aarch64' || arch === 'arm64') return 'arm64'; - if (arch) return `other:${arch}`; - return 'unknown'; -}; - -const normalizePlatform = (platform: string): PlatformName => { - // Node platforms: - // - https://nodejs.org/api/process.html#processplatform - // Deno platforms: - // - https://doc.deno.land/deno/stable/~/Deno.build - // - https://github.com/denoland/deno/issues/14799 - - platform = platform.toLowerCase(); - - // NOTE: this iOS check is untested and may not work - // Node does not work natively on IOS, there is a fork at - // https://github.com/nodejs-mobile/nodejs-mobile - // however it is unknown at the time of writing how to detect if it is running - if (platform.includes('ios')) return 'iOS'; - if (platform === 'android') return 'Android'; - if (platform === 'darwin') return 'MacOS'; - if (platform === 'win32') return 'Windows'; - if (platform === 'freebsd') return 'FreeBSD'; - if (platform === 'openbsd') return 'OpenBSD'; - if (platform === 'linux') return 'Linux'; - if (platform) return `Other:${platform}`; - return 'Unknown'; -}; - -let _platformHeaders: PlatformProperties; -export const getPlatformHeaders = () => { - return (_platformHeaders ??= getPlatformProperties()); -}; diff --git a/src/internal/errors.ts b/src/internal/errors.ts deleted file mode 100644 index 82c7b14..0000000 --- a/src/internal/errors.ts +++ /dev/null @@ -1,33 +0,0 @@ -// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -export function isAbortError(err: unknown) { - return ( - typeof err === 'object' && - err !== null && - // Spec-compliant fetch implementations - (('name' in err && (err as any).name === 'AbortError') || - // Expo fetch - ('message' in err && String((err as any).message).includes('FetchRequestCanceledException'))) - ); -} - -export const castToError = (err: any): Error => { - if (err instanceof Error) return err; - if (typeof err === 'object' && err !== null) { - try { - if (Object.prototype.toString.call(err) === '[object Error]') { - // @ts-ignore - not all envs have native support for cause yet - const error = new Error(err.message, err.cause ? 
{ cause: err.cause } : {}); - if (err.stack) error.stack = err.stack; - // @ts-ignore - not all envs have native support for cause yet - if (err.cause && !error.cause) error.cause = err.cause; - if (err.name) error.name = err.name; - return error; - } - } catch {} - try { - return new Error(JSON.stringify(err)); - } catch {} - } - return new Error(err); -}; diff --git a/src/internal/headers.ts b/src/internal/headers.ts deleted file mode 100644 index c724a9d..0000000 --- a/src/internal/headers.ts +++ /dev/null @@ -1,97 +0,0 @@ -// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -import { isReadonlyArray } from './utils/values'; - -type HeaderValue = string | undefined | null; -export type HeadersLike = - | Headers - | readonly HeaderValue[][] - | Record - | undefined - | null - | NullableHeaders; - -const brand_privateNullableHeaders = /* @__PURE__ */ Symbol('brand.privateNullableHeaders'); - -/** - * @internal - * Users can pass explicit nulls to unset default headers. When we parse them - * into a standard headers type we need to preserve that information. - */ -export type NullableHeaders = { - /** Brand check, prevent users from creating a NullableHeaders. */ - [brand_privateNullableHeaders]: true; - /** Parsed headers. */ - values: Headers; - /** Set of lowercase header names explicitly set to null. */ - nulls: Set; -}; - -function* iterateHeaders(headers: HeadersLike): IterableIterator { - if (!headers) return; - - if (brand_privateNullableHeaders in headers) { - const { values, nulls } = headers; - yield* values.entries(); - for (const name of nulls) { - yield [name, null]; - } - return; - } - - let shouldClear = false; - let iter: Iterable; - if (headers instanceof Headers) { - iter = headers.entries(); - } else if (isReadonlyArray(headers)) { - iter = headers; - } else { - shouldClear = true; - iter = Object.entries(headers ?? {}); - } - for (let row of iter) { - const name = row[0]; - if (typeof name !== 'string') throw new TypeError('expected header name to be a string'); - const values = isReadonlyArray(row[1]) ? row[1] : [row[1]]; - let didClear = false; - for (const value of values) { - if (value === undefined) continue; - - // Objects keys always overwrite older headers, they never append. - // Yield a null to clear the header before adding the new values. - if (shouldClear && !didClear) { - didClear = true; - yield [name, null]; - } - yield [name, value]; - } - } -} - -export const buildHeaders = (newHeaders: HeadersLike[]): NullableHeaders => { - const targetHeaders = new Headers(); - const nullHeaders = new Set(); - for (const headers of newHeaders) { - const seenHeaders = new Set(); - for (const [name, value] of iterateHeaders(headers)) { - const lowerName = name.toLowerCase(); - if (!seenHeaders.has(lowerName)) { - targetHeaders.delete(name); - seenHeaders.add(lowerName); - } - if (value === null) { - targetHeaders.delete(name); - nullHeaders.add(lowerName); - } else { - targetHeaders.append(name, value); - nullHeaders.delete(lowerName); - } - } - } - return { [brand_privateNullableHeaders]: true, values: targetHeaders, nulls: nullHeaders }; -}; - -export const isEmptyHeaders = (headers: HeadersLike) => { - for (const _ of iterateHeaders(headers)) return false; - return true; -}; diff --git a/src/internal/parse.ts b/src/internal/parse.ts deleted file mode 100644 index 6f5ba2e..0000000 --- a/src/internal/parse.ts +++ /dev/null @@ -1,50 +0,0 @@ -// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
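`buildHeaders` above merges header sources in order, with an explicit null unsetting anything set earlier, which is how a per-request option can remove a default header. A small Python sketch of that merge rule (`merge_headers` is a made-up helper, and it drops the null-tracking bookkeeping the real implementation keeps):

```python
from typing import Mapping, Optional


def merge_headers(*sources: Mapping[str, Optional[str]]) -> dict:
    """Later sources win; an explicit None removes a header set by an earlier source."""
    merged: dict = {}
    for headers in sources:
        for name, value in headers.items():
            key = name.lower()
            if value is None:
                merged.pop(key, None)   # explicit null unsets the header
            else:
                merged[key] = value
    return merged


defaults = {"Accept": "application/json", "X-Custom": "abc"}
per_request = {"X-Custom": None, "Authorization": "Bearer TOKEN"}
print(merge_headers(defaults, per_request))
# {'accept': 'application/json', 'authorization': 'Bearer TOKEN'}
```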
- -import type { FinalRequestOptions } from './request-options'; -import { type Brapi } from '../client'; -import { formatRequestDetails, loggerFor } from './utils/log'; - -export type APIResponseProps = { - response: Response; - options: FinalRequestOptions; - controller: AbortController; - requestLogID: string; - retryOfRequestLogID: string | undefined; - startTime: number; -}; - -export async function defaultParseResponse(client: Brapi, props: APIResponseProps): Promise { - const { response, requestLogID, retryOfRequestLogID, startTime } = props; - const body = await (async () => { - // fetch refuses to read the body when the status code is 204. - if (response.status === 204) { - return null as T; - } - - if (props.options.__binaryResponse) { - return response as unknown as T; - } - - const contentType = response.headers.get('content-type'); - const mediaType = contentType?.split(';')[0]?.trim(); - const isJSON = mediaType?.includes('application/json') || mediaType?.endsWith('+json'); - if (isJSON) { - const json = await response.json(); - return json as T; - } - - const text = await response.text(); - return text as unknown as T; - })(); - loggerFor(client).debug( - `[${requestLogID}] response parsed`, - formatRequestDetails({ - retryOfRequestLogID, - url: response.url, - status: response.status, - body, - durationMs: Date.now() - startTime, - }), - ); - return body; -} diff --git a/src/internal/qs/LICENSE.md b/src/internal/qs/LICENSE.md deleted file mode 100644 index 3fda157..0000000 --- a/src/internal/qs/LICENSE.md +++ /dev/null @@ -1,13 +0,0 @@ -BSD 3-Clause License - -Copyright (c) 2014, Nathan LaFreniere and other [contributors](https://github.com/puruvj/neoqs/graphs/contributors) All rights reserved. - -Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: - -1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. - -2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. - -3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/src/internal/qs/README.md b/src/internal/qs/README.md deleted file mode 100644 index 67ae04e..0000000 --- a/src/internal/qs/README.md +++ /dev/null @@ -1,3 +0,0 @@ -# qs - -This is a vendored version of [neoqs](https://github.com/PuruVJ/neoqs) which is a TypeScript rewrite of [qs](https://github.com/ljharb/qs), a query string library. 
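`defaultParseResponse`, deleted a little further up, decides how to read a body: a 204 yields nothing, an `application/json` (or any `+json`) media type is parsed as JSON, and everything else comes back as text. A minimal Python version of that decision (a sketch under those assumptions, not the SDK's parser):

```python
import json
from typing import Any, Optional


def parse_body(status: int, content_type: Optional[str], text: str) -> Any:
    """Mirror the content-type handling of the deleted defaultParseResponse."""
    if status == 204:
        return None  # a 204 has no readable body; treat it as empty

    media_type = (content_type or "").split(";")[0].strip().lower()
    if media_type == "application/json" or media_type.endswith("+json"):
        return json.loads(text)
    return text


print(parse_body(200, "application/json; charset=utf-8", '{"symbol": "PETR4"}'))
print(parse_body(200, "text/plain", "ok"))
print(parse_body(204, None, ""))
```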
diff --git a/src/internal/qs/formats.ts b/src/internal/qs/formats.ts deleted file mode 100644 index e76a742..0000000 --- a/src/internal/qs/formats.ts +++ /dev/null @@ -1,10 +0,0 @@ -import type { Format } from './types'; - -export const default_format: Format = 'RFC3986'; -export const default_formatter = (v: PropertyKey) => String(v); -export const formatters: Record string> = { - RFC1738: (v: PropertyKey) => String(v).replace(/%20/g, '+'), - RFC3986: default_formatter, -}; -export const RFC1738 = 'RFC1738'; -export const RFC3986 = 'RFC3986'; diff --git a/src/internal/qs/index.ts b/src/internal/qs/index.ts deleted file mode 100644 index c3a3620..0000000 --- a/src/internal/qs/index.ts +++ /dev/null @@ -1,13 +0,0 @@ -import { default_format, formatters, RFC1738, RFC3986 } from './formats'; - -const formats = { - formatters, - RFC1738, - RFC3986, - default: default_format, -}; - -export { stringify } from './stringify'; -export { formats }; - -export type { DefaultDecoder, DefaultEncoder, Format, ParseOptions, StringifyOptions } from './types'; diff --git a/src/internal/qs/stringify.ts b/src/internal/qs/stringify.ts deleted file mode 100644 index 7e71387..0000000 --- a/src/internal/qs/stringify.ts +++ /dev/null @@ -1,385 +0,0 @@ -import { encode, is_buffer, maybe_map, has } from './utils'; -import { default_format, default_formatter, formatters } from './formats'; -import type { NonNullableProperties, StringifyOptions } from './types'; -import { isArray } from '../utils/values'; - -const array_prefix_generators = { - brackets(prefix: PropertyKey) { - return String(prefix) + '[]'; - }, - comma: 'comma', - indices(prefix: PropertyKey, key: string) { - return String(prefix) + '[' + key + ']'; - }, - repeat(prefix: PropertyKey) { - return String(prefix); - }, -}; - -const push_to_array = function (arr: any[], value_or_array: any) { - Array.prototype.push.apply(arr, isArray(value_or_array) ? 
value_or_array : [value_or_array]); -}; - -let toISOString; - -const defaults = { - addQueryPrefix: false, - allowDots: false, - allowEmptyArrays: false, - arrayFormat: 'indices', - charset: 'utf-8', - charsetSentinel: false, - delimiter: '&', - encode: true, - encodeDotInKeys: false, - encoder: encode, - encodeValuesOnly: false, - format: default_format, - formatter: default_formatter, - /** @deprecated */ - indices: false, - serializeDate(date) { - return (toISOString ??= Function.prototype.call.bind(Date.prototype.toISOString))(date); - }, - skipNulls: false, - strictNullHandling: false, -} as NonNullableProperties; - -function is_non_nullish_primitive(v: unknown): v is string | number | boolean | symbol | bigint { - return ( - typeof v === 'string' || - typeof v === 'number' || - typeof v === 'boolean' || - typeof v === 'symbol' || - typeof v === 'bigint' - ); -} - -const sentinel = {}; - -function inner_stringify( - object: any, - prefix: PropertyKey, - generateArrayPrefix: StringifyOptions['arrayFormat'] | ((prefix: string, key: string) => string), - commaRoundTrip: boolean, - allowEmptyArrays: boolean, - strictNullHandling: boolean, - skipNulls: boolean, - encodeDotInKeys: boolean, - encoder: StringifyOptions['encoder'], - filter: StringifyOptions['filter'], - sort: StringifyOptions['sort'], - allowDots: StringifyOptions['allowDots'], - serializeDate: StringifyOptions['serializeDate'], - format: StringifyOptions['format'], - formatter: StringifyOptions['formatter'], - encodeValuesOnly: boolean, - charset: StringifyOptions['charset'], - sideChannel: WeakMap, -) { - let obj = object; - - let tmp_sc = sideChannel; - let step = 0; - let find_flag = false; - while ((tmp_sc = tmp_sc.get(sentinel)) !== void undefined && !find_flag) { - // Where object last appeared in the ref tree - const pos = tmp_sc.get(object); - step += 1; - if (typeof pos !== 'undefined') { - if (pos === step) { - throw new RangeError('Cyclic object value'); - } else { - find_flag = true; // Break while - } - } - if (typeof tmp_sc.get(sentinel) === 'undefined') { - step = 0; - } - } - - if (typeof filter === 'function') { - obj = filter(prefix, obj); - } else if (obj instanceof Date) { - obj = serializeDate?.(obj); - } else if (generateArrayPrefix === 'comma' && isArray(obj)) { - obj = maybe_map(obj, function (value) { - if (value instanceof Date) { - return serializeDate?.(value); - } - return value; - }); - } - - if (obj === null) { - if (strictNullHandling) { - return encoder && !encodeValuesOnly ? - // @ts-expect-error - encoder(prefix, defaults.encoder, charset, 'key', format) - : prefix; - } - - obj = ''; - } - - if (is_non_nullish_primitive(obj) || is_buffer(obj)) { - if (encoder) { - const key_value = - encodeValuesOnly ? prefix - // @ts-expect-error - : encoder(prefix, defaults.encoder, charset, 'key', format); - return [ - formatter?.(key_value) + - '=' + - // @ts-expect-error - formatter?.(encoder(obj, defaults.encoder, charset, 'value', format)), - ]; - } - return [formatter?.(prefix) + '=' + formatter?.(String(obj))]; - } - - const values: string[] = []; - - if (typeof obj === 'undefined') { - return values; - } - - let obj_keys; - if (generateArrayPrefix === 'comma' && isArray(obj)) { - // we need to join elements in - if (encodeValuesOnly && encoder) { - // @ts-expect-error values only - obj = maybe_map(obj, encoder); - } - obj_keys = [{ value: obj.length > 0 ? 
obj.join(',') || null : void undefined }]; - } else if (isArray(filter)) { - obj_keys = filter; - } else { - const keys = Object.keys(obj); - obj_keys = sort ? keys.sort(sort) : keys; - } - - const encoded_prefix = encodeDotInKeys ? String(prefix).replace(/\./g, '%2E') : String(prefix); - - const adjusted_prefix = - commaRoundTrip && isArray(obj) && obj.length === 1 ? encoded_prefix + '[]' : encoded_prefix; - - if (allowEmptyArrays && isArray(obj) && obj.length === 0) { - return adjusted_prefix + '[]'; - } - - for (let j = 0; j < obj_keys.length; ++j) { - const key = obj_keys[j]; - const value = - // @ts-ignore - typeof key === 'object' && typeof key.value !== 'undefined' ? key.value : obj[key as any]; - - if (skipNulls && value === null) { - continue; - } - - // @ts-ignore - const encoded_key = allowDots && encodeDotInKeys ? (key as any).replace(/\./g, '%2E') : key; - const key_prefix = - isArray(obj) ? - typeof generateArrayPrefix === 'function' ? - generateArrayPrefix(adjusted_prefix, encoded_key) - : adjusted_prefix - : adjusted_prefix + (allowDots ? '.' + encoded_key : '[' + encoded_key + ']'); - - sideChannel.set(object, step); - const valueSideChannel = new WeakMap(); - valueSideChannel.set(sentinel, sideChannel); - push_to_array( - values, - inner_stringify( - value, - key_prefix, - generateArrayPrefix, - commaRoundTrip, - allowEmptyArrays, - strictNullHandling, - skipNulls, - encodeDotInKeys, - // @ts-ignore - generateArrayPrefix === 'comma' && encodeValuesOnly && isArray(obj) ? null : encoder, - filter, - sort, - allowDots, - serializeDate, - format, - formatter, - encodeValuesOnly, - charset, - valueSideChannel, - ), - ); - } - - return values; -} - -function normalize_stringify_options( - opts: StringifyOptions = defaults, -): NonNullableProperties> & { indices?: boolean } { - if (typeof opts.allowEmptyArrays !== 'undefined' && typeof opts.allowEmptyArrays !== 'boolean') { - throw new TypeError('`allowEmptyArrays` option can only be `true` or `false`, when provided'); - } - - if (typeof opts.encodeDotInKeys !== 'undefined' && typeof opts.encodeDotInKeys !== 'boolean') { - throw new TypeError('`encodeDotInKeys` option can only be `true` or `false`, when provided'); - } - - if (opts.encoder !== null && typeof opts.encoder !== 'undefined' && typeof opts.encoder !== 'function') { - throw new TypeError('Encoder has to be a function.'); - } - - const charset = opts.charset || defaults.charset; - if (typeof opts.charset !== 'undefined' && opts.charset !== 'utf-8' && opts.charset !== 'iso-8859-1') { - throw new TypeError('The charset option must be either utf-8, iso-8859-1, or undefined'); - } - - let format = default_format; - if (typeof opts.format !== 'undefined') { - if (!has(formatters, opts.format)) { - throw new TypeError('Unknown format option provided.'); - } - format = opts.format; - } - const formatter = formatters[format]; - - let filter = defaults.filter; - if (typeof opts.filter === 'function' || isArray(opts.filter)) { - filter = opts.filter; - } - - let arrayFormat: StringifyOptions['arrayFormat']; - if (opts.arrayFormat && opts.arrayFormat in array_prefix_generators) { - arrayFormat = opts.arrayFormat; - } else if ('indices' in opts) { - arrayFormat = opts.indices ? 'indices' : 'repeat'; - } else { - arrayFormat = defaults.arrayFormat; - } - - if ('commaRoundTrip' in opts && typeof opts.commaRoundTrip !== 'boolean') { - throw new TypeError('`commaRoundTrip` must be a boolean, or absent'); - } - - const allowDots = - typeof opts.allowDots === 'undefined' ? 
- !!opts.encodeDotInKeys === true ? - true - : defaults.allowDots - : !!opts.allowDots; - - return { - addQueryPrefix: typeof opts.addQueryPrefix === 'boolean' ? opts.addQueryPrefix : defaults.addQueryPrefix, - // @ts-ignore - allowDots: allowDots, - allowEmptyArrays: - typeof opts.allowEmptyArrays === 'boolean' ? !!opts.allowEmptyArrays : defaults.allowEmptyArrays, - arrayFormat: arrayFormat, - charset: charset, - charsetSentinel: - typeof opts.charsetSentinel === 'boolean' ? opts.charsetSentinel : defaults.charsetSentinel, - commaRoundTrip: !!opts.commaRoundTrip, - delimiter: typeof opts.delimiter === 'undefined' ? defaults.delimiter : opts.delimiter, - encode: typeof opts.encode === 'boolean' ? opts.encode : defaults.encode, - encodeDotInKeys: - typeof opts.encodeDotInKeys === 'boolean' ? opts.encodeDotInKeys : defaults.encodeDotInKeys, - encoder: typeof opts.encoder === 'function' ? opts.encoder : defaults.encoder, - encodeValuesOnly: - typeof opts.encodeValuesOnly === 'boolean' ? opts.encodeValuesOnly : defaults.encodeValuesOnly, - filter: filter, - format: format, - formatter: formatter, - serializeDate: typeof opts.serializeDate === 'function' ? opts.serializeDate : defaults.serializeDate, - skipNulls: typeof opts.skipNulls === 'boolean' ? opts.skipNulls : defaults.skipNulls, - // @ts-ignore - sort: typeof opts.sort === 'function' ? opts.sort : null, - strictNullHandling: - typeof opts.strictNullHandling === 'boolean' ? opts.strictNullHandling : defaults.strictNullHandling, - }; -} - -export function stringify(object: any, opts: StringifyOptions = {}) { - let obj = object; - const options = normalize_stringify_options(opts); - - let obj_keys: PropertyKey[] | undefined; - let filter; - - if (typeof options.filter === 'function') { - filter = options.filter; - obj = filter('', obj); - } else if (isArray(options.filter)) { - filter = options.filter; - obj_keys = filter; - } - - const keys: string[] = []; - - if (typeof obj !== 'object' || obj === null) { - return ''; - } - - const generateArrayPrefix = array_prefix_generators[options.arrayFormat]; - const commaRoundTrip = generateArrayPrefix === 'comma' && options.commaRoundTrip; - - if (!obj_keys) { - obj_keys = Object.keys(obj); - } - - if (options.sort) { - obj_keys.sort(options.sort); - } - - const sideChannel = new WeakMap(); - for (let i = 0; i < obj_keys.length; ++i) { - const key = obj_keys[i]!; - - if (options.skipNulls && obj[key] === null) { - continue; - } - push_to_array( - keys, - inner_stringify( - obj[key], - key, - // @ts-expect-error - generateArrayPrefix, - commaRoundTrip, - options.allowEmptyArrays, - options.strictNullHandling, - options.skipNulls, - options.encodeDotInKeys, - options.encode ? options.encoder : null, - options.filter, - options.sort, - options.allowDots, - options.serializeDate, - options.format, - options.formatter, - options.encodeValuesOnly, - options.charset, - sideChannel, - ), - ); - } - - const joined = keys.join(options.delimiter); - let prefix = options.addQueryPrefix === true ? '?' : ''; - - if (options.charsetSentinel) { - if (options.charset === 'iso-8859-1') { - // encodeURIComponent('✓'), the "numeric entity" representation of a checkmark - prefix += 'utf8=%26%2310003%3B&'; - } else { - // encodeURIComponent('✓') - prefix += 'utf8=%E2%9C%93&'; - } - } - - return joined.length > 0 ? 
prefix + joined : ''; -} diff --git a/src/internal/qs/types.ts b/src/internal/qs/types.ts deleted file mode 100644 index 7c28dbb..0000000 --- a/src/internal/qs/types.ts +++ /dev/null @@ -1,71 +0,0 @@ -export type Format = 'RFC1738' | 'RFC3986'; - -export type DefaultEncoder = (str: any, defaultEncoder?: any, charset?: string) => string; -export type DefaultDecoder = (str: string, decoder?: any, charset?: string) => string; - -export type BooleanOptional = boolean | undefined; - -export type StringifyBaseOptions = { - delimiter?: string; - allowDots?: boolean; - encodeDotInKeys?: boolean; - strictNullHandling?: boolean; - skipNulls?: boolean; - encode?: boolean; - encoder?: ( - str: any, - defaultEncoder: DefaultEncoder, - charset: string, - type: 'key' | 'value', - format?: Format, - ) => string; - filter?: Array | ((prefix: PropertyKey, value: any) => any); - arrayFormat?: 'indices' | 'brackets' | 'repeat' | 'comma'; - indices?: boolean; - sort?: ((a: PropertyKey, b: PropertyKey) => number) | null; - serializeDate?: (d: Date) => string; - format?: 'RFC1738' | 'RFC3986'; - formatter?: (str: PropertyKey) => string; - encodeValuesOnly?: boolean; - addQueryPrefix?: boolean; - charset?: 'utf-8' | 'iso-8859-1'; - charsetSentinel?: boolean; - allowEmptyArrays?: boolean; - commaRoundTrip?: boolean; -}; - -export type StringifyOptions = StringifyBaseOptions; - -export type ParseBaseOptions = { - comma?: boolean; - delimiter?: string | RegExp; - depth?: number | false; - decoder?: (str: string, defaultDecoder: DefaultDecoder, charset: string, type: 'key' | 'value') => any; - arrayLimit?: number; - parseArrays?: boolean; - plainObjects?: boolean; - allowPrototypes?: boolean; - allowSparse?: boolean; - parameterLimit?: number; - strictDepth?: boolean; - strictNullHandling?: boolean; - ignoreQueryPrefix?: boolean; - charset?: 'utf-8' | 'iso-8859-1'; - charsetSentinel?: boolean; - interpretNumericEntities?: boolean; - allowEmptyArrays?: boolean; - duplicates?: 'combine' | 'first' | 'last'; - allowDots?: boolean; - decodeDotInKeys?: boolean; -}; - -export type ParseOptions = ParseBaseOptions; - -export type ParsedQs = { - [key: string]: undefined | string | string[] | ParsedQs | ParsedQs[]; -}; - -// Type to remove null or undefined union from each property -export type NonNullableProperties = { - [K in keyof T]-?: Exclude; -}; diff --git a/src/internal/qs/utils.ts b/src/internal/qs/utils.ts deleted file mode 100644 index 4cd5657..0000000 --- a/src/internal/qs/utils.ts +++ /dev/null @@ -1,265 +0,0 @@ -import { RFC1738 } from './formats'; -import type { DefaultEncoder, Format } from './types'; -import { isArray } from '../utils/values'; - -export let has = (obj: object, key: PropertyKey): boolean => ( - (has = (Object as any).hasOwn ?? Function.prototype.call.bind(Object.prototype.hasOwnProperty)), - has(obj, key) -); - -const hex_table = /* @__PURE__ */ (() => { - const array = []; - for (let i = 0; i < 256; ++i) { - array.push('%' + ((i < 16 ? 
'0' : '') + i.toString(16)).toUpperCase()); - } - - return array; -})(); - -function compact_queue>(queue: Array<{ obj: T; prop: string }>) { - while (queue.length > 1) { - const item = queue.pop(); - if (!item) continue; - - const obj = item.obj[item.prop]; - - if (isArray(obj)) { - const compacted: unknown[] = []; - - for (let j = 0; j < obj.length; ++j) { - if (typeof obj[j] !== 'undefined') { - compacted.push(obj[j]); - } - } - - // @ts-ignore - item.obj[item.prop] = compacted; - } - } -} - -function array_to_object(source: any[], options: { plainObjects: boolean }) { - const obj = options && options.plainObjects ? Object.create(null) : {}; - for (let i = 0; i < source.length; ++i) { - if (typeof source[i] !== 'undefined') { - obj[i] = source[i]; - } - } - - return obj; -} - -export function merge( - target: any, - source: any, - options: { plainObjects?: boolean; allowPrototypes?: boolean } = {}, -) { - if (!source) { - return target; - } - - if (typeof source !== 'object') { - if (isArray(target)) { - target.push(source); - } else if (target && typeof target === 'object') { - if ((options && (options.plainObjects || options.allowPrototypes)) || !has(Object.prototype, source)) { - target[source] = true; - } - } else { - return [target, source]; - } - - return target; - } - - if (!target || typeof target !== 'object') { - return [target].concat(source); - } - - let mergeTarget = target; - if (isArray(target) && !isArray(source)) { - // @ts-ignore - mergeTarget = array_to_object(target, options); - } - - if (isArray(target) && isArray(source)) { - source.forEach(function (item, i) { - if (has(target, i)) { - const targetItem = target[i]; - if (targetItem && typeof targetItem === 'object' && item && typeof item === 'object') { - target[i] = merge(targetItem, item, options); - } else { - target.push(item); - } - } else { - target[i] = item; - } - }); - return target; - } - - return Object.keys(source).reduce(function (acc, key) { - const value = source[key]; - - if (has(acc, key)) { - acc[key] = merge(acc[key], value, options); - } else { - acc[key] = value; - } - return acc; - }, mergeTarget); -} - -export function assign_single_source(target: any, source: any) { - return Object.keys(source).reduce(function (acc, key) { - acc[key] = source[key]; - return acc; - }, target); -} - -export function decode(str: string, _: any, charset: string) { - const strWithoutPlus = str.replace(/\+/g, ' '); - if (charset === 'iso-8859-1') { - // unescape never throws, no try...catch needed: - return strWithoutPlus.replace(/%[0-9a-f]{2}/gi, unescape); - } - // utf-8 - try { - return decodeURIComponent(strWithoutPlus); - } catch (e) { - return strWithoutPlus; - } -} - -const limit = 1024; - -export const encode: ( - str: any, - defaultEncoder: DefaultEncoder, - charset: string, - type: 'key' | 'value', - format: Format, -) => string = (str, _defaultEncoder, charset, _kind, format: Format) => { - // This code was originally written by Brian White for the io.js core querystring library. 
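Reviewer note, not part of the removed source: the file above ports the `qs` stringifier wholesale, so a minimal usage sketch of its `stringify` entry point may help when checking the removal. The relative import path is an assumption based on the deleted file layout, and the expected outputs mirror upstream `qs` semantics.

```ts
// Illustrative sketch only, assuming the pre-removal path src/internal/qs/stringify.ts.
import { stringify } from './internal/qs/stringify';

// arrayFormat controls how arrays are serialized (encode: false keeps brackets readable):
console.log(stringify({ a: [1, 2] }, { arrayFormat: 'indices', encode: false }));  // a[0]=1&a[1]=2
console.log(stringify({ a: [1, 2] }, { arrayFormat: 'brackets', encode: false })); // a[]=1&a[]=2
console.log(stringify({ a: [1, 2] }, { arrayFormat: 'repeat', encode: false }));   // a=1&a=2
console.log(stringify({ a: [1, 2] }, { arrayFormat: 'comma', encode: false }));    // a=1,2

// Default encoding is RFC 3986 percent-encoding (spaces become %20, not '+'):
console.log(stringify({ q: 'ação & índice' })); // q=a%C3%A7%C3%A3o%20%26%20%C3%ADndice
```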
- // It has been adapted here for stricter adherence to RFC 3986 - if (str.length === 0) { - return str; - } - - let string = str; - if (typeof str === 'symbol') { - string = Symbol.prototype.toString.call(str); - } else if (typeof str !== 'string') { - string = String(str); - } - - if (charset === 'iso-8859-1') { - return escape(string).replace(/%u[0-9a-f]{4}/gi, function ($0) { - return '%26%23' + parseInt($0.slice(2), 16) + '%3B'; - }); - } - - let out = ''; - for (let j = 0; j < string.length; j += limit) { - const segment = string.length >= limit ? string.slice(j, j + limit) : string; - const arr = []; - - for (let i = 0; i < segment.length; ++i) { - let c = segment.charCodeAt(i); - if ( - c === 0x2d || // - - c === 0x2e || // . - c === 0x5f || // _ - c === 0x7e || // ~ - (c >= 0x30 && c <= 0x39) || // 0-9 - (c >= 0x41 && c <= 0x5a) || // a-z - (c >= 0x61 && c <= 0x7a) || // A-Z - (format === RFC1738 && (c === 0x28 || c === 0x29)) // ( ) - ) { - arr[arr.length] = segment.charAt(i); - continue; - } - - if (c < 0x80) { - arr[arr.length] = hex_table[c]; - continue; - } - - if (c < 0x800) { - arr[arr.length] = hex_table[0xc0 | (c >> 6)]! + hex_table[0x80 | (c & 0x3f)]; - continue; - } - - if (c < 0xd800 || c >= 0xe000) { - arr[arr.length] = - hex_table[0xe0 | (c >> 12)]! + hex_table[0x80 | ((c >> 6) & 0x3f)] + hex_table[0x80 | (c & 0x3f)]; - continue; - } - - i += 1; - c = 0x10000 + (((c & 0x3ff) << 10) | (segment.charCodeAt(i) & 0x3ff)); - - arr[arr.length] = - hex_table[0xf0 | (c >> 18)]! + - hex_table[0x80 | ((c >> 12) & 0x3f)] + - hex_table[0x80 | ((c >> 6) & 0x3f)] + - hex_table[0x80 | (c & 0x3f)]; - } - - out += arr.join(''); - } - - return out; -}; - -export function compact(value: any) { - const queue = [{ obj: { o: value }, prop: 'o' }]; - const refs = []; - - for (let i = 0; i < queue.length; ++i) { - const item = queue[i]; - // @ts-ignore - const obj = item.obj[item.prop]; - - const keys = Object.keys(obj); - for (let j = 0; j < keys.length; ++j) { - const key = keys[j]!; - const val = obj[key]; - if (typeof val === 'object' && val !== null && refs.indexOf(val) === -1) { - queue.push({ obj: obj, prop: key }); - refs.push(val); - } - } - } - - compact_queue(queue); - - return value; -} - -export function is_regexp(obj: any) { - return Object.prototype.toString.call(obj) === '[object RegExp]'; -} - -export function is_buffer(obj: any) { - if (!obj || typeof obj !== 'object') { - return false; - } - - return !!(obj.constructor && obj.constructor.isBuffer && obj.constructor.isBuffer(obj)); -} - -export function combine(a: any, b: any) { - return [].concat(a, b); -} - -export function maybe_map(val: T[], fn: (v: T) => T) { - if (isArray(val)) { - const mapped = []; - for (let i = 0; i < val.length; i += 1) { - mapped.push(fn(val[i]!)); - } - return mapped; - } - return fn(val); -} diff --git a/src/internal/request-options.ts b/src/internal/request-options.ts deleted file mode 100644 index 2aabf9a..0000000 --- a/src/internal/request-options.ts +++ /dev/null @@ -1,91 +0,0 @@ -// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -import { NullableHeaders } from './headers'; - -import type { BodyInit } from './builtin-types'; -import type { HTTPMethod, MergedRequestInit } from './types'; -import { type HeadersLike } from './headers'; - -export type FinalRequestOptions = RequestOptions & { method: HTTPMethod; path: string }; - -export type RequestOptions = { - /** - * The HTTP method for the request (e.g., 'get', 'post', 'put', 'delete'). 
- */ - method?: HTTPMethod; - - /** - * The URL path for the request. - * - * @example "/v1/foo" - */ - path?: string; - - /** - * Query parameters to include in the request URL. - */ - query?: object | undefined | null; - - /** - * The request body. Can be a string, JSON object, FormData, or other supported types. - */ - body?: unknown; - - /** - * HTTP headers to include with the request. Can be a Headers object, plain object, or array of tuples. - */ - headers?: HeadersLike; - - /** - * The maximum number of times that the client will retry a request in case of a - * temporary failure, like a network error or a 5XX error from the server. - * - * @default 2 - */ - maxRetries?: number; - - stream?: boolean | undefined; - - /** - * The maximum amount of time (in milliseconds) that the client should wait for a response - * from the server before timing out a single request. - * - * @unit milliseconds - */ - timeout?: number; - - /** - * Additional `RequestInit` options to be passed to the underlying `fetch` call. - * These options will be merged with the client's default fetch options. - */ - fetchOptions?: MergedRequestInit; - - /** - * An AbortSignal that can be used to cancel the request. - */ - signal?: AbortSignal | undefined | null; - - /** - * A unique key for this request to enable idempotency. - */ - idempotencyKey?: string; - - /** - * Override the default base URL for this specific request. - */ - defaultBaseURL?: string | undefined; - - __binaryResponse?: boolean | undefined; -}; - -export type EncodedContent = { bodyHeaders: HeadersLike; body: BodyInit }; -export type RequestEncoder = (request: { headers: NullableHeaders; body: unknown }) => EncodedContent; - -export const FallbackEncoder: RequestEncoder = ({ headers, body }) => { - return { - bodyHeaders: { - 'content-type': 'application/json', - }, - body: JSON.stringify(body), - }; -}; diff --git a/src/internal/shim-types.ts b/src/internal/shim-types.ts deleted file mode 100644 index 8ddf7b0..0000000 --- a/src/internal/shim-types.ts +++ /dev/null @@ -1,26 +0,0 @@ -// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -/** - * Shims for types that we can't always rely on being available globally. - * - * Note: these only exist at the type-level, there is no corresponding runtime - * version for any of these symbols. - */ - -type NeverToAny = T extends never ? any : T; - -/** @ts-ignore */ -type _DOMReadableStream = globalThis.ReadableStream; - -/** @ts-ignore */ -type _NodeReadableStream = import('stream/web').ReadableStream; - -type _ConditionalNodeReadableStream = - typeof globalThis extends { ReadableStream: any } ? never : _NodeReadableStream; - -type _ReadableStream = NeverToAny< - | ([0] extends [1 & _DOMReadableStream] ? never : _DOMReadableStream) - | ([0] extends [1 & _ConditionalNodeReadableStream] ? never : _ConditionalNodeReadableStream) ->; - -export type { _ReadableStream as ReadableStream }; diff --git a/src/internal/shims.ts b/src/internal/shims.ts deleted file mode 100644 index fbf9f1e..0000000 --- a/src/internal/shims.ts +++ /dev/null @@ -1,107 +0,0 @@ -// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -/** - * This module provides internal shims and utility functions for environments where certain Node.js or global types may not be available. - * - * These are used to ensure we can provide a consistent behaviour between different JavaScript environments and good error - * messages in cases where an environment isn't fully supported. 
- */ - -import type { Fetch } from './builtin-types'; -import type { ReadableStream } from './shim-types'; - -export function getDefaultFetch(): Fetch { - if (typeof fetch !== 'undefined') { - return fetch as any; - } - - throw new Error( - '`fetch` is not defined as a global; Either pass `fetch` to the client, `new Brapi({ fetch })` or polyfill the global, `globalThis.fetch = fetch`', - ); -} - -type ReadableStreamArgs = ConstructorParameters; - -export function makeReadableStream(...args: ReadableStreamArgs): ReadableStream { - const ReadableStream = (globalThis as any).ReadableStream; - if (typeof ReadableStream === 'undefined') { - // Note: All of the platforms / runtimes we officially support already define - // `ReadableStream` as a global, so this should only ever be hit on unsupported runtimes. - throw new Error( - '`ReadableStream` is not defined as a global; You will need to polyfill it, `globalThis.ReadableStream = ReadableStream`', - ); - } - - return new ReadableStream(...args); -} - -export function ReadableStreamFrom(iterable: Iterable | AsyncIterable): ReadableStream { - let iter: AsyncIterator | Iterator = - Symbol.asyncIterator in iterable ? iterable[Symbol.asyncIterator]() : iterable[Symbol.iterator](); - - return makeReadableStream({ - start() {}, - async pull(controller: any) { - const { done, value } = await iter.next(); - if (done) { - controller.close(); - } else { - controller.enqueue(value); - } - }, - async cancel() { - await iter.return?.(); - }, - }); -} - -/** - * Most browsers don't yet have async iterable support for ReadableStream, - * and Node has a very different way of reading bytes from its "ReadableStream". - * - * This polyfill was pulled from https://github.com/MattiasBuelens/web-streams-polyfill/pull/122#issuecomment-1627354490 - */ -export function ReadableStreamToAsyncIterable(stream: any): AsyncIterableIterator { - if (stream[Symbol.asyncIterator]) return stream; - - const reader = stream.getReader(); - return { - async next() { - try { - const result = await reader.read(); - if (result?.done) reader.releaseLock(); // release lock when stream becomes closed - return result; - } catch (e) { - reader.releaseLock(); // release lock when stream becomes errored - throw e; - } - }, - async return() { - const cancelPromise = reader.cancel(); - reader.releaseLock(); - await cancelPromise; - return { done: true, value: undefined }; - }, - [Symbol.asyncIterator]() { - return this; - }, - }; -} - -/** - * Cancels a ReadableStream we don't need to consume. - * See https://undici.nodejs.org/#/?id=garbage-collection - */ -export async function CancelReadableStream(stream: any): Promise { - if (stream === null || typeof stream !== 'object') return; - - if (stream[Symbol.asyncIterator]) { - await stream[Symbol.asyncIterator]().return?.(); - return; - } - - const reader = stream.getReader(); - const cancelPromise = reader.cancel(); - reader.releaseLock(); - await cancelPromise; -} diff --git a/src/internal/to-file.ts b/src/internal/to-file.ts deleted file mode 100644 index 30eada3..0000000 --- a/src/internal/to-file.ts +++ /dev/null @@ -1,154 +0,0 @@ -import { BlobPart, getName, makeFile, isAsyncIterable } from './uploads'; -import type { FilePropertyBag } from './builtin-types'; -import { checkFileSupport } from './uploads'; - -type BlobLikePart = string | ArrayBuffer | ArrayBufferView | BlobLike | DataView; - -/** - * Intended to match DOM Blob, node-fetch Blob, node:buffer Blob, etc. 
- * Don't add arrayBuffer here, node-fetch doesn't have it - */ -interface BlobLike { - /** [MDN Reference](https://developer.mozilla.org/docs/Web/API/Blob/size) */ - readonly size: number; - /** [MDN Reference](https://developer.mozilla.org/docs/Web/API/Blob/type) */ - readonly type: string; - /** [MDN Reference](https://developer.mozilla.org/docs/Web/API/Blob/text) */ - text(): Promise; - /** [MDN Reference](https://developer.mozilla.org/docs/Web/API/Blob/slice) */ - slice(start?: number, end?: number): BlobLike; -} - -/** - * This check adds the arrayBuffer() method type because it is available and used at runtime - */ -const isBlobLike = (value: any): value is BlobLike & { arrayBuffer(): Promise } => - value != null && - typeof value === 'object' && - typeof value.size === 'number' && - typeof value.type === 'string' && - typeof value.text === 'function' && - typeof value.slice === 'function' && - typeof value.arrayBuffer === 'function'; - -/** - * Intended to match DOM File, node:buffer File, undici File, etc. - */ -interface FileLike extends BlobLike { - /** [MDN Reference](https://developer.mozilla.org/docs/Web/API/File/lastModified) */ - readonly lastModified: number; - /** [MDN Reference](https://developer.mozilla.org/docs/Web/API/File/name) */ - readonly name?: string | undefined; -} - -/** - * This check adds the arrayBuffer() method type because it is available and used at runtime - */ -const isFileLike = (value: any): value is FileLike & { arrayBuffer(): Promise } => - value != null && - typeof value === 'object' && - typeof value.name === 'string' && - typeof value.lastModified === 'number' && - isBlobLike(value); - -/** - * Intended to match DOM Response, node-fetch Response, undici Response, etc. - */ -export interface ResponseLike { - url: string; - blob(): Promise; -} - -const isResponseLike = (value: any): value is ResponseLike => - value != null && - typeof value === 'object' && - typeof value.url === 'string' && - typeof value.blob === 'function'; - -export type ToFileInput = - | FileLike - | ResponseLike - | Exclude - | AsyncIterable; - -/** - * Helper for creating a {@link File} to pass to an SDK upload method from a variety of different data formats - * @param value the raw content of the file. Can be an {@link Uploadable}, BlobLikePart, or AsyncIterable of BlobLikeParts - * @param {string=} name the name of the file. If omitted, toFile will try to determine a file name from bits if possible - * @param {Object=} options additional properties - * @param {string=} options.type the MIME type of the content - * @param {number=} options.lastModified the last modified timestamp - * @returns a {@link File} with the given properties - */ -export async function toFile( - value: ToFileInput | PromiseLike, - name?: string | null | undefined, - options?: FilePropertyBag | undefined, -): Promise { - checkFileSupport(); - - // If it's a promise, resolve it. 
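Reviewer note, not part of the removed source: a short sketch of how the `toFile` helper documented above was typically called. The file names and the relative import path are illustrative assumptions, not taken from this changeset.

```ts
// Illustrative sketch only: converting different inputs into a File with the removed helper.
import fs from 'node:fs';
import { toFile } from './internal/to-file'; // pre-removal path, shown for context

async function examples() {
  // From raw bytes, with an explicit name and MIME type:
  const txt = await toFile(new TextEncoder().encode('hello world'), 'hello.txt', { type: 'text/plain' });

  // From a fetch Response: the name falls back to the last segment of the URL path:
  const pdf = await toFile(await fetch('https://example.com/report.pdf'));

  // From a Node read stream (any AsyncIterable of byte chunks is accepted):
  const csv = await toFile(fs.createReadStream('data.csv'), 'data.csv');

  return [txt, pdf, csv];
}
```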
- value = await value; - - // If we've been given a `File` we don't need to do anything - if (isFileLike(value)) { - if (value instanceof File) { - return value; - } - return makeFile([await value.arrayBuffer()], value.name); - } - - if (isResponseLike(value)) { - const blob = await value.blob(); - name ||= new URL(value.url).pathname.split(/[\\/]/).pop(); - - return makeFile(await getBytes(blob), name, options); - } - - const parts = await getBytes(value); - - name ||= getName(value); - - if (!options?.type) { - const type = parts.find((part) => typeof part === 'object' && 'type' in part && part.type); - if (typeof type === 'string') { - options = { ...options, type }; - } - } - - return makeFile(parts, name, options); -} - -async function getBytes(value: BlobLikePart | AsyncIterable): Promise> { - let parts: Array = []; - if ( - typeof value === 'string' || - ArrayBuffer.isView(value) || // includes Uint8Array, Buffer, etc. - value instanceof ArrayBuffer - ) { - parts.push(value); - } else if (isBlobLike(value)) { - parts.push(value instanceof Blob ? value : await value.arrayBuffer()); - } else if ( - isAsyncIterable(value) // includes Readable, ReadableStream, etc. - ) { - for await (const chunk of value) { - parts.push(...(await getBytes(chunk as BlobLikePart))); // TODO, consider validating? - } - } else { - const constructor = value?.constructor?.name; - throw new Error( - `Unexpected data type: ${typeof value}${ - constructor ? `; constructor: ${constructor}` : '' - }${propsForError(value)}`, - ); - } - - return parts; -} - -function propsForError(value: unknown): string { - if (typeof value !== 'object' || value === null) return ''; - const props = Object.getOwnPropertyNames(value); - return `; props: [${props.map((p) => `"${p}"`).join(', ')}]`; -} diff --git a/src/internal/types.ts b/src/internal/types.ts deleted file mode 100644 index b668dfc..0000000 --- a/src/internal/types.ts +++ /dev/null @@ -1,95 +0,0 @@ -// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -export type PromiseOrValue = T | Promise; -export type HTTPMethod = 'get' | 'post' | 'put' | 'patch' | 'delete'; - -export type KeysEnum = { [P in keyof Required]: true }; - -export type FinalizedRequestInit = RequestInit & { headers: Headers }; - -type NotAny = [0] extends [1 & T] ? never : T; - -/** - * Some environments overload the global fetch function, and Parameters only gets the last signature. - */ -type OverloadedParameters = - T extends ( - { - (...args: infer A): unknown; - (...args: infer B): unknown; - (...args: infer C): unknown; - (...args: infer D): unknown; - } - ) ? - A | B | C | D - : T extends ( - { - (...args: infer A): unknown; - (...args: infer B): unknown; - (...args: infer C): unknown; - } - ) ? - A | B | C - : T extends ( - { - (...args: infer A): unknown; - (...args: infer B): unknown; - } - ) ? - A | B - : T extends (...args: infer A) => unknown ? A - : never; - -/* eslint-disable */ -/** - * These imports attempt to get types from a parent package's dependencies. - * Unresolved bare specifiers can trigger [automatic type acquisition][1] in some projects, which - * would cause typescript to show types not present at runtime. To avoid this, we import - * directly from parent node_modules folders. - * - * We need to check multiple levels because we don't know what directory structure we'll be in. 
- * For example, pnpm generates directories like this: - * ``` - * node_modules - * ├── .pnpm - * │ └── pkg@1.0.0 - * │ └── node_modules - * │ └── pkg - * │ └── internal - * │ └── types.d.ts - * ├── pkg -> .pnpm/pkg@1.0.0/node_modules/pkg - * └── undici - * ``` - * - * [1]: https://www.typescriptlang.org/tsconfig/#typeAcquisition - */ -/** @ts-ignore For users with \@types/node */ -type UndiciTypesRequestInit = NotAny | NotAny | NotAny | NotAny | NotAny | NotAny | NotAny | NotAny | NotAny | NotAny; -/** @ts-ignore For users with undici */ -type UndiciRequestInit = NotAny | NotAny | NotAny | NotAny | NotAny | NotAny | NotAny | NotAny | NotAny | NotAny; -/** @ts-ignore For users with \@types/bun */ -type BunRequestInit = globalThis.FetchRequestInit; -/** @ts-ignore For users with node-fetch@2 */ -type NodeFetch2RequestInit = NotAny | NotAny | NotAny | NotAny | NotAny | NotAny | NotAny | NotAny | NotAny | NotAny; -/** @ts-ignore For users with node-fetch@3, doesn't need file extension because types are at ./@types/index.d.ts */ -type NodeFetch3RequestInit = NotAny | NotAny | NotAny | NotAny | NotAny | NotAny | NotAny | NotAny | NotAny | NotAny; -/** @ts-ignore For users who use Deno */ -type FetchRequestInit = NonNullable[1]>; -/* eslint-enable */ - -type RequestInits = - | NotAny - | NotAny - | NotAny - | NotAny - | NotAny - | NotAny - | NotAny; - -/** - * This type contains `RequestInit` options that may be available on the current runtime, - * including per-platform extensions like `dispatcher`, `agent`, `client`, etc. - */ -export type MergedRequestInit = RequestInits & - /** We don't include these in the types as they'll be overridden for every request. */ - Partial>; diff --git a/src/internal/uploads.ts b/src/internal/uploads.ts deleted file mode 100644 index 8203c82..0000000 --- a/src/internal/uploads.ts +++ /dev/null @@ -1,187 +0,0 @@ -import { type RequestOptions } from './request-options'; -import type { FilePropertyBag, Fetch } from './builtin-types'; -import type { Brapi } from '../client'; -import { ReadableStreamFrom } from './shims'; - -export type BlobPart = string | ArrayBuffer | ArrayBufferView | Blob | DataView; -type FsReadStream = AsyncIterable & { path: string | { toString(): string } }; - -// https://github.com/oven-sh/bun/issues/5980 -interface BunFile extends Blob { - readonly name?: string | undefined; -} - -export const checkFileSupport = () => { - if (typeof File === 'undefined') { - const { process } = globalThis as any; - const isOldNode = - typeof process?.versions?.node === 'string' && parseInt(process.versions.node.split('.')) < 20; - throw new Error( - '`File` is not defined as a global, which is required for file uploads.' + - (isOldNode ? - " Update to Node 20 LTS or newer, or set `globalThis.File` to `import('node:buffer').File`." - : ''), - ); - } -}; - -/** - * Typically, this is a native "File" class. - * - * We provide the {@link toFile} utility to convert a variety of objects - * into the File class. - * - * For convenience, you can also pass a fetch Response, or in Node, - * the result of fs.createReadStream(). - */ -export type Uploadable = File | Response | FsReadStream | BunFile; - -/** - * Construct a `File` instance. This is used to ensure a helpful error is thrown - * for environments that don't define a global `File` yet. - */ -export function makeFile( - fileBits: BlobPart[], - fileName: string | undefined, - options?: FilePropertyBag, -): File { - checkFileSupport(); - return new File(fileBits as any, fileName ?? 
'unknown_file', options); -} - -export function getName(value: any): string | undefined { - return ( - ( - (typeof value === 'object' && - value !== null && - (('name' in value && value.name && String(value.name)) || - ('url' in value && value.url && String(value.url)) || - ('filename' in value && value.filename && String(value.filename)) || - ('path' in value && value.path && String(value.path)))) || - '' - ) - .split(/[\\/]/) - .pop() || undefined - ); -} - -export const isAsyncIterable = (value: any): value is AsyncIterable => - value != null && typeof value === 'object' && typeof value[Symbol.asyncIterator] === 'function'; - -/** - * Returns a multipart/form-data request if any part of the given request body contains a File / Blob value. - * Otherwise returns the request as is. - */ -export const maybeMultipartFormRequestOptions = async ( - opts: RequestOptions, - fetch: Brapi | Fetch, -): Promise => { - if (!hasUploadableValue(opts.body)) return opts; - - return { ...opts, body: await createForm(opts.body, fetch) }; -}; - -type MultipartFormRequestOptions = Omit & { body: unknown }; - -export const multipartFormRequestOptions = async ( - opts: MultipartFormRequestOptions, - fetch: Brapi | Fetch, -): Promise => { - return { ...opts, body: await createForm(opts.body, fetch) }; -}; - -const supportsFormDataMap = /* @__PURE__ */ new WeakMap>(); - -/** - * node-fetch doesn't support the global FormData object in recent node versions. Instead of sending - * properly-encoded form data, it just stringifies the object, resulting in a request body of "[object FormData]". - * This function detects if the fetch function provided supports the global FormData object to avoid - * confusing error messages later on. - */ -function supportsFormData(fetchObject: Brapi | Fetch): Promise { - const fetch: Fetch = typeof fetchObject === 'function' ? fetchObject : (fetchObject as any).fetch; - const cached = supportsFormDataMap.get(fetch); - if (cached) return cached; - const promise = (async () => { - try { - const FetchResponse = ( - 'Response' in fetch ? - fetch.Response - : (await fetch('data:,')).constructor) as typeof Response; - const data = new FormData(); - if (data.toString() === (await new FetchResponse(data).text())) { - return false; - } - return true; - } catch { - // avoid false negatives - return true; - } - })(); - supportsFormDataMap.set(fetch, promise); - return promise; -} - -export const createForm = async >( - body: T | undefined, - fetch: Brapi | Fetch, -): Promise => { - if (!(await supportsFormData(fetch))) { - throw new TypeError( - 'The provided fetch function does not support file uploads with the current global FormData class.', - ); - } - const form = new FormData(); - await Promise.all(Object.entries(body || {}).map(([key, value]) => addFormValue(form, key, value))); - return form; -}; - -// We check for Blob not File because Bun.File doesn't inherit from File, -// but they both inherit from Blob and have a `name` property at runtime. 
-const isNamedBlob = (value: unknown) => value instanceof Blob && 'name' in value; - -const isUploadable = (value: unknown) => - typeof value === 'object' && - value !== null && - (value instanceof Response || isAsyncIterable(value) || isNamedBlob(value)); - -const hasUploadableValue = (value: unknown): boolean => { - if (isUploadable(value)) return true; - if (Array.isArray(value)) return value.some(hasUploadableValue); - if (value && typeof value === 'object') { - for (const k in value) { - if (hasUploadableValue((value as any)[k])) return true; - } - } - return false; -}; - -const addFormValue = async (form: FormData, key: string, value: unknown): Promise => { - if (value === undefined) return; - if (value == null) { - throw new TypeError( - `Received null for "${key}"; to pass null in FormData, you must use the string 'null'`, - ); - } - - // TODO: make nested formats configurable - if (typeof value === 'string' || typeof value === 'number' || typeof value === 'boolean') { - form.append(key, String(value)); - } else if (value instanceof Response) { - form.append(key, makeFile([await value.blob()], getName(value))); - } else if (isAsyncIterable(value)) { - form.append(key, makeFile([await new Response(ReadableStreamFrom(value)).blob()], getName(value))); - } else if (isNamedBlob(value)) { - form.append(key, value, getName(value)); - } else if (Array.isArray(value)) { - await Promise.all(value.map((entry) => addFormValue(form, key + '[]', entry))); - } else if (typeof value === 'object') { - await Promise.all( - Object.entries(value).map(([name, prop]) => addFormValue(form, `${key}[${name}]`, prop)), - ); - } else { - throw new TypeError( - `Invalid value given to form, expected a string, number, boolean, object, Array, File or Blob but got ${value} instead`, - ); - } -}; diff --git a/src/internal/utils.ts b/src/internal/utils.ts deleted file mode 100644 index 3cbfacc..0000000 --- a/src/internal/utils.ts +++ /dev/null @@ -1,8 +0,0 @@ -// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -export * from './utils/values'; -export * from './utils/base64'; -export * from './utils/env'; -export * from './utils/log'; -export * from './utils/uuid'; -export * from './utils/sleep'; diff --git a/src/internal/utils/base64.ts b/src/internal/utils/base64.ts deleted file mode 100644 index d838c73..0000000 --- a/src/internal/utils/base64.ts +++ /dev/null @@ -1,40 +0,0 @@ -// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
- -import { BrapiError } from '../../core/error'; -import { encodeUTF8 } from './bytes'; - -export const toBase64 = (data: string | Uint8Array | null | undefined): string => { - if (!data) return ''; - - if (typeof (globalThis as any).Buffer !== 'undefined') { - return (globalThis as any).Buffer.from(data).toString('base64'); - } - - if (typeof data === 'string') { - data = encodeUTF8(data); - } - - if (typeof btoa !== 'undefined') { - return btoa(String.fromCharCode.apply(null, data as any)); - } - - throw new BrapiError('Cannot generate base64 string; Expected `Buffer` or `btoa` to be defined'); -}; - -export const fromBase64 = (str: string): Uint8Array => { - if (typeof (globalThis as any).Buffer !== 'undefined') { - const buf = (globalThis as any).Buffer.from(str, 'base64'); - return new Uint8Array(buf.buffer, buf.byteOffset, buf.byteLength); - } - - if (typeof atob !== 'undefined') { - const bstr = atob(str); - const buf = new Uint8Array(bstr.length); - for (let i = 0; i < bstr.length; i++) { - buf[i] = bstr.charCodeAt(i); - } - return buf; - } - - throw new BrapiError('Cannot decode base64 string; Expected `Buffer` or `atob` to be defined'); -}; diff --git a/src/internal/utils/bytes.ts b/src/internal/utils/bytes.ts deleted file mode 100644 index 8da627a..0000000 --- a/src/internal/utils/bytes.ts +++ /dev/null @@ -1,32 +0,0 @@ -export function concatBytes(buffers: Uint8Array[]): Uint8Array { - let length = 0; - for (const buffer of buffers) { - length += buffer.length; - } - const output = new Uint8Array(length); - let index = 0; - for (const buffer of buffers) { - output.set(buffer, index); - index += buffer.length; - } - - return output; -} - -let encodeUTF8_: (str: string) => Uint8Array; -export function encodeUTF8(str: string) { - let encoder; - return ( - encodeUTF8_ ?? - ((encoder = new (globalThis as any).TextEncoder()), (encodeUTF8_ = encoder.encode.bind(encoder))) - )(str); -} - -let decodeUTF8_: (bytes: Uint8Array) => string; -export function decodeUTF8(bytes: Uint8Array) { - let decoder; - return ( - decodeUTF8_ ?? - ((decoder = new (globalThis as any).TextDecoder()), (decodeUTF8_ = decoder.decode.bind(decoder))) - )(bytes); -} diff --git a/src/internal/utils/env.ts b/src/internal/utils/env.ts deleted file mode 100644 index 2d84800..0000000 --- a/src/internal/utils/env.ts +++ /dev/null @@ -1,18 +0,0 @@ -// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -/** - * Read an environment variable. - * - * Trims beginning and trailing whitespace. - * - * Will return undefined if the environment variable doesn't exist or cannot be accessed. - */ -export const readEnv = (env: string): string | undefined => { - if (typeof (globalThis as any).process !== 'undefined') { - return (globalThis as any).process.env?.[env]?.trim() ?? undefined; - } - if (typeof (globalThis as any).Deno !== 'undefined') { - return (globalThis as any).Deno.env?.get?.(env)?.trim(); - } - return undefined; -}; diff --git a/src/internal/utils/log.ts b/src/internal/utils/log.ts deleted file mode 100644 index c38c2f0..0000000 --- a/src/internal/utils/log.ts +++ /dev/null @@ -1,126 +0,0 @@ -// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
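Reviewer note, not part of the removed source: the base64 and UTF-8 byte helpers deleted just above round-trip as sketched below; the import paths mirror the removed file locations.

```ts
// Illustrative sketch only: round-tripping a string through the removed helpers.
import { toBase64, fromBase64 } from './internal/utils/base64';
import { decodeUTF8 } from './internal/utils/bytes';

const b64 = toBase64('brapi ✓');    // prefers Buffer when available, otherwise falls back to btoa
const bytes = fromBase64(b64);      // Uint8Array holding the original UTF-8 bytes
console.log(decodeUTF8(bytes));     // "brapi ✓"
```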
- -import { hasOwn } from './values'; -import { type Brapi } from '../../client'; -import { RequestOptions } from '../request-options'; - -type LogFn = (message: string, ...rest: unknown[]) => void; -export type Logger = { - error: LogFn; - warn: LogFn; - info: LogFn; - debug: LogFn; -}; -export type LogLevel = 'off' | 'error' | 'warn' | 'info' | 'debug'; - -const levelNumbers = { - off: 0, - error: 200, - warn: 300, - info: 400, - debug: 500, -}; - -export const parseLogLevel = ( - maybeLevel: string | undefined, - sourceName: string, - client: Brapi, -): LogLevel | undefined => { - if (!maybeLevel) { - return undefined; - } - if (hasOwn(levelNumbers, maybeLevel)) { - return maybeLevel; - } - loggerFor(client).warn( - `${sourceName} was set to ${JSON.stringify(maybeLevel)}, expected one of ${JSON.stringify( - Object.keys(levelNumbers), - )}`, - ); - return undefined; -}; - -function noop() {} - -function makeLogFn(fnLevel: keyof Logger, logger: Logger | undefined, logLevel: LogLevel) { - if (!logger || levelNumbers[fnLevel] > levelNumbers[logLevel]) { - return noop; - } else { - // Don't wrap logger functions, we want the stacktrace intact! - return logger[fnLevel].bind(logger); - } -} - -const noopLogger = { - error: noop, - warn: noop, - info: noop, - debug: noop, -}; - -let cachedLoggers = /* @__PURE__ */ new WeakMap(); - -export function loggerFor(client: Brapi): Logger { - const logger = client.logger; - const logLevel = client.logLevel ?? 'off'; - if (!logger) { - return noopLogger; - } - - const cachedLogger = cachedLoggers.get(logger); - if (cachedLogger && cachedLogger[0] === logLevel) { - return cachedLogger[1]; - } - - const levelLogger = { - error: makeLogFn('error', logger, logLevel), - warn: makeLogFn('warn', logger, logLevel), - info: makeLogFn('info', logger, logLevel), - debug: makeLogFn('debug', logger, logLevel), - }; - - cachedLoggers.set(logger, [logLevel, levelLogger]); - - return levelLogger; -} - -export const formatRequestDetails = (details: { - options?: RequestOptions | undefined; - headers?: Headers | Record | undefined; - retryOfRequestLogID?: string | undefined; - retryOf?: string | undefined; - url?: string | undefined; - status?: number | undefined; - method?: string | undefined; - durationMs?: number | undefined; - message?: unknown; - body?: unknown; -}) => { - if (details.options) { - details.options = { ...details.options }; - delete details.options['headers']; // redundant + leaks internals - } - if (details.headers) { - details.headers = Object.fromEntries( - (details.headers instanceof Headers ? [...details.headers] : Object.entries(details.headers)).map( - ([name, value]) => [ - name, - ( - name.toLowerCase() === 'authorization' || - name.toLowerCase() === 'cookie' || - name.toLowerCase() === 'set-cookie' - ) ? - '***' - : value, - ], - ), - ); - } - if ('retryOfRequestLogID' in details) { - if (details.retryOfRequestLogID) { - details.retryOf = details.retryOfRequestLogID; - } - delete details.retryOfRequestLogID; - } - return details; -}; diff --git a/src/internal/utils/path.ts b/src/internal/utils/path.ts deleted file mode 100644 index 4a122b7..0000000 --- a/src/internal/utils/path.ts +++ /dev/null @@ -1,88 +0,0 @@ -import { BrapiError } from '../../core/error'; - -/** - * Percent-encode everything that isn't safe to have in a path without encoding safe chars. - * - * Taken from https://datatracker.ietf.org/doc/html/rfc3986#section-3.3: - * > unreserved = ALPHA / DIGIT / "-" / "." / "_" / "~" - * > sub-delims = "!" 
/ "$" / "&" / "'" / "(" / ")" / "*" / "+" / "," / ";" / "=" - * > pchar = unreserved / pct-encoded / sub-delims / ":" / "@" - */ -export function encodeURIPath(str: string) { - return str.replace(/[^A-Za-z0-9\-._~!$&'()*+,;=:@]+/g, encodeURIComponent); -} - -const EMPTY = /* @__PURE__ */ Object.freeze(/* @__PURE__ */ Object.create(null)); - -export const createPathTagFunction = (pathEncoder = encodeURIPath) => - function path(statics: readonly string[], ...params: readonly unknown[]): string { - // If there are no params, no processing is needed. - if (statics.length === 1) return statics[0]!; - - let postPath = false; - const invalidSegments = []; - const path = statics.reduce((previousValue, currentValue, index) => { - if (/[?#]/.test(currentValue)) { - postPath = true; - } - const value = params[index]; - let encoded = (postPath ? encodeURIComponent : pathEncoder)('' + value); - if ( - index !== params.length && - (value == null || - (typeof value === 'object' && - // handle values from other realms - value.toString === - Object.getPrototypeOf(Object.getPrototypeOf((value as any).hasOwnProperty ?? EMPTY) ?? EMPTY) - ?.toString)) - ) { - encoded = value + ''; - invalidSegments.push({ - start: previousValue.length + currentValue.length, - length: encoded.length, - error: `Value of type ${Object.prototype.toString - .call(value) - .slice(8, -1)} is not a valid path parameter`, - }); - } - return previousValue + currentValue + (index === params.length ? '' : encoded); - }, ''); - - const pathOnly = path.split(/[?#]/, 1)[0]!; - const invalidSegmentPattern = /(?<=^|\/)(?:\.|%2e){1,2}(?=\/|$)/gi; - let match; - - // Find all invalid segments - while ((match = invalidSegmentPattern.exec(pathOnly)) !== null) { - invalidSegments.push({ - start: match.index, - length: match[0].length, - error: `Value "${match[0]}" can\'t be safely passed as a path parameter`, - }); - } - - invalidSegments.sort((a, b) => a.start - b.start); - - if (invalidSegments.length > 0) { - let lastEnd = 0; - const underline = invalidSegments.reduce((acc, segment) => { - const spaces = ' '.repeat(segment.start - lastEnd); - const arrows = '^'.repeat(segment.length); - lastEnd = segment.start + segment.length; - return acc + spaces + arrows; - }, ''); - - throw new BrapiError( - `Path parameters result in path with invalid segments:\n${invalidSegments - .map((e) => e.error) - .join('\n')}\n${path}\n${underline}`, - ); - } - - return path; - }; - -/** - * URI-encodes path params and ensures no unsafe /./ or /../ path segments are introduced. - */ -export const path = /* @__PURE__ */ createPathTagFunction(encodeURIPath); diff --git a/src/internal/utils/sleep.ts b/src/internal/utils/sleep.ts deleted file mode 100644 index 65e5296..0000000 --- a/src/internal/utils/sleep.ts +++ /dev/null @@ -1,3 +0,0 @@ -// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -export const sleep = (ms: number) => new Promise((resolve) => setTimeout(resolve, ms)); diff --git a/src/internal/utils/uuid.ts b/src/internal/utils/uuid.ts deleted file mode 100644 index b0e53aa..0000000 --- a/src/internal/utils/uuid.ts +++ /dev/null @@ -1,17 +0,0 @@ -// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -/** - * https://stackoverflow.com/a/2117523 - */ -export let uuid4 = function () { - const { crypto } = globalThis as any; - if (crypto?.randomUUID) { - uuid4 = crypto.randomUUID.bind(crypto); - return crypto.randomUUID(); - } - const u8 = new Uint8Array(1); - const randomByte = crypto ? 
() => crypto.getRandomValues(u8)[0]! : () => (Math.random() * 0xff) & 0xff; - return '10000000-1000-4000-8000-100000000000'.replace(/[018]/g, (c) => - (+c ^ (randomByte() & (15 >> (+c / 4)))).toString(16), - ); -}; diff --git a/src/internal/utils/values.ts b/src/internal/utils/values.ts deleted file mode 100644 index f246b28..0000000 --- a/src/internal/utils/values.ts +++ /dev/null @@ -1,105 +0,0 @@ -// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -import { BrapiError } from '../../core/error'; - -// https://url.spec.whatwg.org/#url-scheme-string -const startsWithSchemeRegexp = /^[a-z][a-z0-9+.-]*:/i; - -export const isAbsoluteURL = (url: string): boolean => { - return startsWithSchemeRegexp.test(url); -}; - -export let isArray = (val: unknown): val is unknown[] => ((isArray = Array.isArray), isArray(val)); -export let isReadonlyArray = isArray as (val: unknown) => val is readonly unknown[]; - -/** Returns an object if the given value isn't an object, otherwise returns as-is */ -export function maybeObj(x: unknown): object { - if (typeof x !== 'object') { - return {}; - } - - return x ?? {}; -} - -// https://stackoverflow.com/a/34491287 -export function isEmptyObj(obj: Object | null | undefined): boolean { - if (!obj) return true; - for (const _k in obj) return false; - return true; -} - -// https://eslint.org/docs/latest/rules/no-prototype-builtins -export function hasOwn(obj: T, key: PropertyKey): key is keyof T { - return Object.prototype.hasOwnProperty.call(obj, key); -} - -export function isObj(obj: unknown): obj is Record { - return obj != null && typeof obj === 'object' && !Array.isArray(obj); -} - -export const ensurePresent = (value: T | null | undefined): T => { - if (value == null) { - throw new BrapiError(`Expected a value to be given but received ${value} instead.`); - } - - return value; -}; - -export const validatePositiveInteger = (name: string, n: unknown): number => { - if (typeof n !== 'number' || !Number.isInteger(n)) { - throw new BrapiError(`${name} must be an integer`); - } - if (n < 0) { - throw new BrapiError(`${name} must be a positive integer`); - } - return n; -}; - -export const coerceInteger = (value: unknown): number => { - if (typeof value === 'number') return Math.round(value); - if (typeof value === 'string') return parseInt(value, 10); - - throw new BrapiError(`Could not coerce ${value} (type: ${typeof value}) into a number`); -}; - -export const coerceFloat = (value: unknown): number => { - if (typeof value === 'number') return value; - if (typeof value === 'string') return parseFloat(value); - - throw new BrapiError(`Could not coerce ${value} (type: ${typeof value}) into a number`); -}; - -export const coerceBoolean = (value: unknown): boolean => { - if (typeof value === 'boolean') return value; - if (typeof value === 'string') return value === 'true'; - return Boolean(value); -}; - -export const maybeCoerceInteger = (value: unknown): number | undefined => { - if (value == null) { - return undefined; - } - return coerceInteger(value); -}; - -export const maybeCoerceFloat = (value: unknown): number | undefined => { - if (value == null) { - return undefined; - } - return coerceFloat(value); -}; - -export const maybeCoerceBoolean = (value: unknown): boolean | undefined => { - if (value == null) { - return undefined; - } - return coerceBoolean(value); -}; - -export const safeJSON = (text: string) => { - try { - return JSON.parse(text); - } catch (err) { - return undefined; - } -}; diff --git a/src/resource.ts 
b/src/resource.ts deleted file mode 100644 index 363e351..0000000 --- a/src/resource.ts +++ /dev/null @@ -1,2 +0,0 @@ -/** @deprecated Import from ./core/resource instead */ -export * from './core/resource'; diff --git a/src/resources.ts b/src/resources.ts deleted file mode 100644 index b283d57..0000000 --- a/src/resources.ts +++ /dev/null @@ -1 +0,0 @@ -export * from './resources/index'; diff --git a/src/resources/available.ts b/src/resources/available.ts deleted file mode 100644 index 3363581..0000000 --- a/src/resources/available.ts +++ /dev/null @@ -1,98 +0,0 @@ -// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -import { APIResource } from '../core/resource'; -import { APIPromise } from '../core/api-promise'; -import { RequestOptions } from '../internal/request-options'; - -export class Available extends APIResource { - /** - * Obtenha uma lista completa de todos os tickers (identificadores) de ativos - * financeiros (ações, FIIs, BDRs, ETFs, índices) que a API Brapi tem dados - * disponíveis para consulta no endpoint `/api/quote/{tickers}`. - * - * ### Funcionalidade: - * - * - Retorna arrays separados para `indexes` (índices) e `stocks` (outros ativos). - * - Pode ser filtrado usando o parâmetro `search` para encontrar tickers - * específicos. - * - * ### Autenticação: - * - * Requer token de autenticação via `token` (query) ou `Authorization` (header). - * - * ### Exemplo de Requisição: - * - * **Listar todos os tickers disponíveis:** - * - * ```bash - * curl -X GET "https://brapi.dev/api/available?token=SEU_TOKEN" - * ``` - * - * **Buscar tickers que contenham 'BBDC':** - * - * ```bash - * curl -X GET "https://brapi.dev/api/available?search=BBDC&token=SEU_TOKEN" - * ``` - * - * ### Resposta: - * - * A resposta é um objeto JSON com duas chaves: - * - * - `indexes`: Array de strings contendo os tickers dos índices disponíveis (ex: - * `["^BVSP", "^IFIX"]`). - * - `stocks`: Array de strings contendo os tickers das ações, FIIs, BDRs e ETFs - * disponíveis (ex: `["PETR4", "VALE3", "ITSA4", "MXRF11"]`). - */ - list( - query: AvailableListParams | null | undefined = {}, - options?: RequestOptions, - ): APIPromise { - return this._client.get('/api/available', { query, ...options }); - } -} - -/** - * Resposta do endpoint que lista todos os tickers disponíveis. - */ -export interface AvailableListResponse { - /** - * Lista de tickers de **índices** disponíveis (ex: `^BVSP`, `^IFIX`). - */ - indexes: Array; - - /** - * Lista de tickers de **ações, FIIs, BDRs e ETFs** disponíveis (ex: `PETR4`, - * `VALE3`, `MXRF11`). - */ - stocks: Array; -} - -export interface AvailableListParams { - /** - * **Obrigatório caso não esteja adicionado como header "Authorization".** Seu - * token de autenticação pessoal da API Brapi. - * - * **Formas de Envio:** - * - * 1. **Query Parameter:** Adicione `?token=SEU_TOKEN` ao final da URL. - * 2. **HTTP Header:** Inclua o header `Authorization: Bearer SEU_TOKEN` na sua - * requisição. - * - * Ambos os métodos são aceitos, mas pelo menos um deles deve ser utilizado. - * Obtenha seu token em [brapi.dev/dashboard](https://brapi.dev/dashboard). - */ - token?: string; - - /** - * **Opcional.** Termo para filtrar a lista de tickers (correspondência parcial, - * case-insensitive). Se omitido, retorna todos os tickers. 
- */ - search?: string; -} - -export declare namespace Available { - export { - type AvailableListResponse as AvailableListResponse, - type AvailableListParams as AvailableListParams, - }; -} diff --git a/src/resources/index.ts b/src/resources/index.ts deleted file mode 100644 index 70a1da4..0000000 --- a/src/resources/index.ts +++ /dev/null @@ -1,17 +0,0 @@ -// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -export { Available, type AvailableListResponse, type AvailableListParams } from './available'; -export { - Quote, - type BalanceSheetEntry, - type CashflowEntry, - type DefaultKeyStatisticsEntry, - type FinancialDataEntry, - type IncomeStatementEntry, - type ValueAddedEntry, - type QuoteRetrieveResponse, - type QuoteListResponse, - type QuoteRetrieveParams, - type QuoteListParams, -} from './quote'; -export { V2 } from './v2/v2'; diff --git a/src/resources/quote.ts b/src/resources/quote.ts deleted file mode 100644 index c2e12cc..0000000 --- a/src/resources/quote.ts +++ /dev/null @@ -1,2809 +0,0 @@ -// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -import { APIResource } from '../core/resource'; -import * as QuoteAPI from './quote'; -import { APIPromise } from '../core/api-promise'; -import { RequestOptions } from '../internal/request-options'; -import { path } from '../internal/utils/path'; - -export class Quote extends APIResource { - /** - * Este endpoint é a principal forma de obter informações detalhadas sobre um ou - * mais ativos financeiros (ações, FIIs, ETFs, BDRs, índices) listados na B3, - * identificados pelos seus respectivos **tickers**. - * - * ### Funcionalidades Principais: - * - * - **Cotação Atual:** Retorna o preço mais recente, variação diária, máximas, - * mínimas, volume, etc. - * - **Dados Históricos:** Permite solicitar séries históricas de preços usando os - * parâmetros `range` e `interval`. - * - **Dados Fundamentalistas:** Opcionalmente, inclui dados fundamentalistas - * básicos (P/L, LPA) com o parâmetro `fundamental=true`. - * - **Dividendos:** Opcionalmente, inclui histórico de dividendos e JCP com - * `dividends=true`. - * - **Módulos Adicionais:** Permite requisitar conjuntos de dados financeiros mais - * aprofundados através do parâmetro `modules` (veja detalhes abaixo). - * - * ### 🧪 Ações de Teste (Sem Autenticação): - * - * Para facilitar o desenvolvimento e teste, as seguintes **4 ações têm acesso - * irrestrito** e **não requerem autenticação**: - * - * - **PETR4** (Petrobras PN) - * - **MGLU3** (Magazine Luiza ON) - * - **VALE3** (Vale ON) - * - **ITUB4** (Itaú Unibanco PN) - * - * **Importante:** Você pode consultar essas ações sem token e com acesso a todos - * os recursos (históricos, módulos, dividendos). Porém, se misturar essas ações - * com outras na mesma requisição, a autenticação será obrigatória. - * - * ### Autenticação: - * - * Para **outras ações** (além das 4 de teste), é **obrigatório** fornecer um token - * de autenticação válido, seja via query parameter `token` ou via header - * `Authorization: Bearer seu_token`. - * - * ### Exemplos de Requisição: - * - * **1. Cotação simples de PETR4 e VALE3 (ações de teste - sem token):** - * - * ```bash - * curl -X GET "https://brapi.dev/api/quote/PETR4,VALE3" - * ``` - * - * **2. Cotação de MGLU3 com dados históricos do último mês (ação de teste - sem - * token):** - * - * ```bash - * curl -X GET "https://brapi.dev/api/quote/MGLU3?range=1mo&interval=1d" - * ``` - * - * **3. 
Cotação de ITUB4 incluindo dividendos e dados fundamentalistas (ação de - * teste - sem token):** - * - * ```bash - * curl -X GET "https://brapi.dev/api/quote/ITUB4?fundamental=true&dividends=true" - * ``` - * - * **4. Cotação de WEGE3 com Resumo da Empresa e Balanço Patrimonial Anual (via - * módulos - requer token):** - * - * ```bash - * curl -X GET "https://brapi.dev/api/quote/WEGE3?modules=summaryProfile,balanceSheetHistory&token=SEU_TOKEN" - * ``` - * - * **5. Exemplo de requisição mista (requer token):** - * - * ```bash - * curl -X GET "https://brapi.dev/api/quote/PETR4,BBAS3?token=SEU_TOKEN" - * ``` - * - * _Nota: Como BBAS3 não é uma ação de teste, toda a requisição requer - * autenticação, mesmo contendo PETR4._ - * - * ### Parâmetro `modules` (Detalhado): - * - * O parâmetro `modules` é extremamente poderoso para enriquecer a resposta com - * dados financeiros detalhados. Você pode solicitar um ou mais módulos, separados - * por vírgula. - * - * **Módulos Disponíveis:** - * - * - `summaryProfile`: Informações cadastrais da empresa (endereço, setor, - * descrição do negócio, website, número de funcionários). - * - `balanceSheetHistory`: Histórico **anual** do Balanço Patrimonial. - * - `balanceSheetHistoryQuarterly`: Histórico **trimestral** do Balanço - * Patrimonial. - * - `defaultKeyStatistics`: Principais estatísticas da empresa (Valor de Mercado, - * P/L, ROE, Dividend Yield, etc.) - **TTM (Trailing Twelve Months)**. - * - `defaultKeyStatisticsHistory`: Histórico **anual** das Principais - * Estatísticas. - * - `defaultKeyStatisticsHistoryQuarterly`: Histórico **trimestral** das - * Principais Estatísticas. - * - `incomeStatementHistory`: Histórico **anual** da Demonstração do Resultado do - * Exercício (DRE). - * - `incomeStatementHistoryQuarterly`: Histórico **trimestral** da Demonstração do - * Resultado do Exercício (DRE). - * - `financialData`: Dados financeiros selecionados (Receita, Lucro Bruto, EBITDA, - * Dívida Líquida, Fluxo de Caixa Livre, Margens) - **TTM (Trailing Twelve - * Months)**. - * - `financialDataHistory`: Histórico **anual** dos Dados Financeiros. - * - `financialDataHistoryQuarterly`: Histórico **trimestral** dos Dados - * Financeiros. - * - `valueAddedHistory`: Histórico **anual** da Demonstração do Valor Adicionado - * (DVA). - * - `valueAddedHistoryQuarterly`: Histórico **trimestral** da Demonstração do - * Valor Adicionado (DVA). - * - `cashflowHistory`: Histórico **anual** da Demonstração do Fluxo de Caixa - * (DFC). - * - `cashflowHistoryQuarterly`: Histórico **trimestral** da Demonstração do Fluxo - * de Caixa (DFC). - * - * **Exemplo de Uso do `modules`:** - * - * Para obter a cotação de BBDC4 junto com seu DRE trimestral e Fluxo de Caixa - * anual: - * - * ```bash - * curl -X GET "https://brapi.dev/api/quote/BBDC4?modules=incomeStatementHistoryQuarterly,cashflowHistory&token=SEU_TOKEN" - * ``` - * - * ### Resposta: - * - * A resposta é um objeto JSON contendo a chave `results`, que é um array. Cada - * elemento do array corresponde a um ticker solicitado e contém os dados da - * cotação e os módulos adicionais requisitados. - * - * - **Sucesso (200 OK):** Retorna os dados conforme solicitado. - * - **Bad Request (400 Bad Request):** Ocorre se um parâmetro for inválido (ex: - * `range=invalid`) ou se a formatação estiver incorreta. - * - **Unauthorized (401 Unauthorized):** Token inválido ou ausente. - * - **Payment Required (402 Payment Required):** Limite de requisições do plano - * atual excedido.
- * - **Not Found (404 Not Found):** Um ou mais tickers solicitados não foram - * encontrados. - */ - retrieve( - tickers: string, - query: QuoteRetrieveParams | null | undefined = {}, - options?: RequestOptions, - ): APIPromise { - return this._client.get(path`/api/quote/${tickers}`, { query, ...options }); - } - - /** - * Obtenha uma lista paginada de cotações de diversos ativos (ações, FIIs, BDRs) - * negociados na B3, com opções avançadas de busca, filtragem e ordenação. - * - * ### Funcionalidades: - * - * - **Busca por Ticker:** Filtre por parte do ticker usando `search`. - * - **Filtragem por Tipo:** Restrinja a lista a `stock`, `fund` (FII) ou `bdr` com - * o parâmetro `type`. - * - **Filtragem por Setor:** Selecione ativos de um setor específico usando - * `sector`. - * - **Ordenação:** Ordene os resultados por diversos campos (preço, variação, - * volume, etc.) usando `sortBy` e `sortOrder`. - * - **Paginação:** Controle o número de resultados por página (`limit`) e a página - * desejada (`page`). - * - * ### Autenticação: - * - * Requer token de autenticação via `token` (query) ou `Authorization` (header). - * - * ### Exemplo de Requisição: - * - * **Listar as 10 ações do setor Financeiro com maior volume, ordenadas de forma - * decrescente:** - * - * ```bash - * curl -X GET "https://brapi.dev/api/quote/list?sector=Finance&sortBy=volume&sortOrder=desc&limit=10&page=1&token=SEU_TOKEN" - * ``` - * - * **Buscar por ativos cujo ticker contenha 'ITUB' e ordenar por nome ascendente:** - * - * ```bash - * curl -X GET "https://brapi.dev/api/quote/list?search=ITUB&sortBy=name&sortOrder=asc&token=SEU_TOKEN" - * ``` - * - * ### Resposta: - * - * A resposta contém a lista de `stocks` (e `indexes` relevantes), informações - * sobre os filtros aplicados, detalhes da paginação (`currentPage`, `totalPages`, - * `itemsPerPage`, `totalCount`, `hasNextPage`) e listas de setores - * (`availableSectors`) e tipos (`availableStockTypes`) disponíveis para filtragem. - */ - list( - query: QuoteListParams | null | undefined = {}, - options?: RequestOptions, - ): APIPromise { - return this._client.get('/api/quote/list', { query, ...options }); - } -} - -/** - * Representa os dados de um Balanço Patrimonial para um período específico (anual - * ou trimestral). - */ -export interface BalanceSheetEntry { - /** - * Contas a pagar (fornecedores). - */ - accountsPayable?: number | null; - - /** - * Contas a receber de clientes (bruto). - */ - accountsReceivableFromClients?: number | null; - - /** - * Lucros ou prejuízos acumulados. - */ - accumulatedProfitsOrLosses?: number | null; - - /** - * Adiantamento para futuro aumento de capital (AFAC). - */ - advanceForFutureCapitalIncrease?: number | null; - - /** - * Ativos biológicos. - */ - biologicalAssets?: number | null; - - /** - * Obrigações de capitalização. - */ - capitalization?: number | null; - - /** - * Reservas de capital (sinônimo de `capitalSurplus`). - */ - capitalReserves?: number | null; - - /** - * Reservas de capital. - */ - capitalSurplus?: number | null; - - /** - * Caixa e equivalentes de caixa. - */ - cash?: number | null; - - /** - * Depósitos compulsórios no Banco Central. - */ - centralBankCompulsoryDeposit?: number | null; - - /** - * Capital social realizado. - */ - commonStock?: number | null; - - /** - * Obrigações de previdência complementar. - */ - complementaryPension?: number | null; - - /** - * Empréstimos e depósitos compulsórios. 
- */ - compulsoryLoansAndDeposits?: number | null; - - /** - * Patrimônio líquido atribuível aos controladores. - */ - controllerShareholdersEquity?: number | null; - - /** - * Créditos oriundos de operações (instituições financeiras/seguradoras). - */ - creditsFromOperations?: number | null; - - /** - * Créditos com partes relacionadas. - */ - creditsWithRelatedParties?: number | null; - - /** - * Ajustes acumulados de conversão. - */ - cumulativeConversionAdjustments?: number | null; - - /** - * Tributos correntes e diferidos no ativo. - */ - currentAndDeferredTaxes?: number | null; - - /** - * Total do passivo circulante (sinônimo de `totalCurrentLiabilities`). - */ - currentLiabilities?: number | null; - - /** - * Debêntures (passivo circulante). - */ - debentures?: number | null; - - /** - * Débitos de operações de capitalização. - */ - debitsFromCapitalization?: number | null; - - /** - * Débitos de operações de previdência complementar. - */ - debitsFromComplementaryPension?: number | null; - - /** - * Débitos de operações de seguros e resseguros. - */ - debitsFromInsuranceAndReinsurance?: number | null; - - /** - * Débitos oriundos de operações. - */ - debitsFromOperations?: number | null; - - /** - * Débitos de outras operações. - */ - debitsFromOtherOperations?: number | null; - - /** - * Encargos diferidos de ativos de longo prazo. - */ - deferredLongTermAssetCharges?: number | null; - - /** - * Passivos fiscais diferidos (longo prazo). - */ - deferredLongTermLiab?: number | null; - - /** - * Despesas de comercialização diferidas. - */ - deferredSellingExpenses?: number | null; - - /** - * Tributos diferidos no ativo. - */ - deferredTaxes?: number | null; - - /** - * Data de término do período fiscal ao qual o balanço se refere (YYYY-MM-DD). - */ - endDate?: string; - - /** - * Ajustes de avaliação patrimonial. - */ - equityValuationAdjustments?: number | null; - - /** - * Ativos financeiros (agregado de instrumentos financeiros no ativo). - */ - financialAssets?: number | null; - - /** - * Ativos financeiros ao custo amortizado. - */ - financialAssetsAtAmortizedCost?: number | null; - - /** - * Ativos financeiros mensurados a valor justo por outros resultados abrangentes - * (FVOCI). - */ - financialAssetsMeasuredAtFairValueThroughOtherComprehensiveIncome?: number | null; - - /** - * Ativos financeiros mensurados a valor justo por meio do resultado (FVTPL). - */ - financialAssetsMeasuredAtFairValueThroughProfitOrLoss?: number | null; - - /** - * Investimentos financeiros mensurados ao custo amortizado. - */ - financialInvestmentsMeasuredAtAmortizedCost?: number | null; - - /** - * Investimentos financeiros mensurados a valor justo por outros resultados - * abrangentes. - */ - financialInvestmentsMeasuredAtFairValueThroughOtherComprehensiveIncome?: number | null; - - /** - * Passivos financeiros ao custo amortizado. - */ - financialLiabilitiesAtAmortizedCost?: number | null; - - /** - * Passivos financeiros mensurados a valor justo por meio do resultado. - */ - financialLiabilitiesMeasuredAtFairValueThroughIncome?: number | null; - - /** - * Fornecedores estrangeiros. - */ - foreignSuppliers?: number | null; - - /** - * Ágio por expectativa de rentabilidade futura (Goodwill). - */ - goodWill?: number | null; - - /** - * Provisões/obrigações de seguros e resseguros. - */ - insuranceAndReinsurance?: number | null; - - /** - * Ativo intangível (valor agregado). - */ - intangibleAsset?: number | null; - - /** - * Ativos intangíveis (marcas, patentes, etc.). 
- */ - intangibleAssets?: number | null; - - /** - * Estoques. - */ - inventory?: number | null; - - /** - * Propriedades para investimento. - */ - investmentProperties?: number | null; - - /** - * Investimentos (participações e outros). - */ - investments?: number | null; - - /** - * Financiamento por arrendamento mercantil (circulante). - */ - leaseFinancing?: number | null; - - /** - * Empréstimos e financiamentos (circulante). - */ - loansAndFinancing?: number | null; - - /** - * Empréstimos e financiamentos em moeda estrangeira (circulante). - */ - loansAndFinancingInForeignCurrency?: number | null; - - /** - * Empréstimos e financiamentos em moeda nacional (circulante). - */ - loansAndFinancingInNationalCurrency?: number | null; - - /** - * Fornecedores/contas a pagar de longo prazo. - */ - longTermAccountsPayable?: number | null; - - /** - * Contas a receber de clientes - longo prazo. - */ - longTermAccountsReceivableFromClients?: number | null; - - /** - * Total do ativo não circulante (agregado). - */ - longTermAssets?: number | null; - - /** - * Ativos biológicos de longo prazo. - */ - longTermBiologicalAssets?: number | null; - - /** - * Obrigações de capitalização de longo prazo. - */ - longTermCapitalization?: number | null; - - /** - * Obrigações de previdência complementar de longo prazo. - */ - longTermComplementaryPension?: number | null; - - /** - * Debêntures (passivo não circulante). - */ - longTermDebentures?: number | null; - - /** - * Débitos de operações (longo prazo). - */ - longTermDebitsFromOperations?: number | null; - - /** - * Dívida de longo prazo (empréstimos e financiamentos não circulantes). - */ - longTermDebt?: number | null; - - /** - * Tributos diferidos (Ativo Não Circulante). - */ - longTermDeferredTaxes?: number | null; - - /** - * Investimentos financeiros de longo prazo mensurados a valor justo por meio do - * resultado. - */ - longTermFinancialInvestmentsMeasuredAtFairValueThroughIncome?: number | null; - - /** - * Obrigações de seguros e resseguros de longo prazo. - */ - longTermInsuranceAndReinsurance?: number | null; - - /** - * Estoques de longo prazo. - */ - longTermInventory?: number | null; - - /** - * Investimentos de longo prazo. - */ - longTermInvestments?: number | null; - - /** - * Financiamento por arrendamento mercantil (não circulante). - */ - longTermLeaseFinancing?: number | null; - - /** - * Total do passivo de longo prazo. - */ - longTermLiabilities?: number | null; - - /** - * Empréstimos e financiamentos (não circulante). - */ - longTermLoansAndFinancing?: number | null; - - /** - * Empréstimos e financiamentos em moeda estrangeira (não circulante). - */ - longTermLoansAndFinancingInForeignCurrency?: number | null; - - /** - * Empréstimos e financiamentos em moeda nacional (não circulante). - */ - longTermLoansAndFinancingInNationalCurrency?: number | null; - - /** - * Despesas antecipadas de longo prazo. - */ - longTermPrepaidExpenses?: number | null; - - /** - * Provisões (passivo não circulante). - */ - longTermProvisions?: number | null; - - /** - * Ativo realizável a longo prazo. - */ - longTermRealizableAssets?: number | null; - - /** - * Contas a receber de longo prazo. - */ - longTermReceivables?: number | null; - - /** - * Provisões técnicas de longo prazo. - */ - longTermTechnicalProvisions?: number | null; - - /** - * Participação de não controladores (no patrimônio líquido). - */ - minorityInterest?: number | null; - - /** - * Fornecedores nacionais. 
- */ - nationalSuppliers?: number | null; - - /** - * Contas a receber líquidas (clientes). - */ - netReceivables?: number | null; - - /** - * Ativos tangíveis líquidos (Ativo Total - Intangíveis - Passivo Total). - */ - netTangibleAssets?: number | null; - - /** - * Participação dos não controladores no patrimônio líquido. - */ - nonControllingShareholdersEquity?: number | null; - - /** - * Total do ativo não circulante (sinônimo de `longTermAssets`). - */ - nonCurrentAssets?: number | null; - - /** - * Total do passivo não circulante. - */ - nonCurrentLiabilities?: number | null; - - /** - * Outras contas a receber. - */ - otherAccountsReceivable?: number | null; - - /** - * Outros ativos não circulantes. - */ - otherAssets?: number | null; - - /** - * Outros resultados abrangentes. - */ - otherComprehensiveResults?: number | null; - - /** - * Outros ativos circulantes. - */ - otherCurrentAssets?: number | null; - - /** - * Outros passivos circulantes. - */ - otherCurrentLiab?: number | null; - - /** - * Outros passivos circulantes (sinônimo de `otherCurrentLiab`). - */ - otherCurrentLiabilities?: number | null; - - /** - * Outros débitos. - */ - otherDebits?: number | null; - - /** - * Outros passivos não circulantes. - */ - otherLiab?: number | null; - - /** - * Outros passivos. - */ - otherLiabilities?: number | null; - - /** - * Outras obrigações (passivo não circulante). - */ - otherLongTermObligations?: number | null; - - /** - * Outras provisões de longo prazo. - */ - otherLongTermProvisions?: number | null; - - /** - * Outros créditos/recebíveis de longo prazo. - */ - otherLongTermReceivables?: number | null; - - /** - * Outros ativos não circulantes (detalhamento). - */ - otherNonCurrentAssets?: number | null; - - /** - * Outros passivos não circulantes. - */ - otherNonCurrentLiabilities?: number | null; - - /** - * Outras obrigações (circulante). - */ - otherObligations?: number | null; - - /** - * Outras contas operacionais no ativo. - */ - otherOperations?: number | null; - - /** - * Outras provisões (diversas). - */ - otherProvisions?: number | null; - - /** - * Outros componentes do patrimônio líquido. - */ - otherStockholderEquity?: number | null; - - /** - * Outros valores e bens. - */ - otherValuesAndAssets?: number | null; - - /** - * Despesas antecipadas. - */ - prepaidExpenses?: number | null; - - /** - * Reservas de lucros. - */ - profitReserves?: number | null; - - /** - * Lucros e receitas a apropriar. - */ - profitsAndRevenuesToBeAppropriated?: number | null; - - /** - * Imobilizado (propriedades, instalações e equipamentos). - */ - propertyPlantEquipment?: number | null; - - /** - * Fornecedores (sinônimo de `accountsPayable`). - */ - providers?: number | null; - - /** - * Provisões (passivo). - */ - provisions?: number | null; - - /** - * Capital social realizado (sinônimo de `commonStock`). - */ - realizedShareCapital?: number | null; - - /** - * Lucros/Prejuízos acumulados. - */ - retainedEarnings?: number | null; - - /** - * Reservas de reavaliação. - */ - revaluationReserves?: number | null; - - /** - * Títulos e créditos a receber. - */ - securitiesAndCreditsReceivable?: number | null; - - /** - * Patrimônio líquido (sinônimo de `totalStockholderEquity`). - */ - shareholdersEquity?: number | null; - - /** - * Participações societárias. - */ - shareholdings?: number | null; - - /** - * Dívida de curto prazo (empréstimos e financiamentos circulantes). - */ - shortLongTermDebt?: number | null; - - /** - * Aplicações financeiras de curto prazo. 
- */ - shortTermInvestments?: number | null; - - /** - * Obrigações sociais e trabalhistas. - */ - socialAndLaborObligations?: number | null; - - /** - * Ticker do ativo ao qual o balanço se refere. - */ - symbol?: string; - - /** - * Impostos a recuperar. - */ - taxesToRecover?: number | null; - - /** - * Obrigações fiscais (passivo). - */ - taxLiabilities?: number | null; - - /** - * Obrigações fiscais (passivo circulante). - */ - taxObligations?: number | null; - - /** - * Provisões técnicas (seguradoras/previdência). - */ - technicalProvisions?: number | null; - - /** - * Depósitos de terceiros. - */ - thirdPartyDeposits?: number | null; - - /** - * Total do ativo. - */ - totalAssets?: number | null; - - /** - * Total do ativo circulante. - */ - totalCurrentAssets?: number | null; - - /** - * Total do passivo circulante. - */ - totalCurrentLiabilities?: number | null; - - /** - * Total do passivo (circulante + não circulante). - */ - totalLiab?: number | null; - - /** - * Total do passivo. - */ - totalLiabilities?: number | null; - - /** - * Total do patrimônio líquido. - */ - totalStockholderEquity?: number | null; - - /** - * Ações em tesouraria. - */ - treasuryStock?: number | null; - - /** - * Indica a periodicidade do balanço: `yearly` (anual) ou `quarterly` (trimestral). - */ - type?: 'yearly' | 'quarterly'; - - /** - * Data da última atualização deste registro (YYYY-MM-DD). - */ - updatedAt?: string | null; -} - -/** - * Representa os dados de uma Demonstração do Fluxo de Caixa (DFC) para um período - * específico (anual ou trimestral). - */ -export interface CashflowEntry { - /** - * Ajustes ao lucro/prejuízo (depreciação, amortização, equivalência patrimonial, - * variações não caixa). - */ - adjustmentsToProfitOrLoss?: number | null; - - /** - * Caixa gerado nas operações (após variações no capital de giro). - */ - cashGeneratedInOperations?: number | null; - - /** - * Variações em Ativos e Passivos Operacionais (Clientes, Estoques, Fornecedores, - * etc.). - */ - changesInAssetsAndLiabilities?: number | null; - - /** - * Data de término do período fiscal ao qual a DFC se refere (YYYY-MM-DD). - */ - endDate?: string; - - /** - * Variação cambial sem efeito caixa (ajuste de conversão). - */ - exchangeVariationWithoutCash?: number | null; - - /** - * Saldo Final de Caixa e Equivalentes no final do período. - */ - finalCashBalance?: number | null; - - /** - * Fluxo de Caixa das Atividades de Financiamento (FCF) (Captação/Pagamento de - * Empréstimos, Emissão/Recompra de Ações, Dividendos pagos). - */ - financingCashFlow?: number | null; - - /** - * Efeito da Variação Cambial sobre o Caixa e Equivalentes. - */ - foreignExchangeRateWithoutCash?: number | null; - - /** - * Caixa Gerado nas Operações (antes das variações de ativos/passivos). - */ - incomeFromOperations?: number | null; - - /** - * Aumento ou Redução Líquida de Caixa e Equivalentes (FCO + FCI + FCF + Variação - * Cambial). - */ - increaseOrDecreaseInCash?: number | null; - - /** - * Saldo Inicial de Caixa e Equivalentes no início do período. - */ - initialCashBalance?: number | null; - - /** - * Fluxo de Caixa das Atividades de Investimento (FCI) (Compra/Venda de - * Imobilizado, Investimentos). - */ - investmentCashFlow?: number | null; - - /** - * Lucro líquido antes dos impostos (base para reconciliação pelo método indireto). - */ - netIncomeBeforeTaxes?: number | null; - - /** - * Fluxo de Caixa das Atividades Operacionais (FCO). 
- */ - operatingCashFlow?: number | null; - - /** - * Outras Atividades Operacionais (Juros pagos/recebidos, Impostos pagos, etc.). - */ - otherOperatingActivities?: number | null; - - /** - * Ticker do ativo ao qual a DFC se refere. - */ - symbol?: string; - - /** - * Indica a periodicidade da DFC: `yearly` (anual) ou `quarterly` (trimestral). - */ - type?: 'yearly' | 'quarterly'; - - /** - * Data da última atualização deste registro específico na fonte de dados - * (YYYY-MM-DD). - */ - updatedAt?: string; -} - -/** - * Representa um conjunto de principais indicadores e estatísticas financeiras para - * um período (TTM, anual ou trimestral). - */ -export interface DefaultKeyStatisticsEntry { - /** - * Variação percentual do preço da ação nas últimas 52 semanas. - */ - '52WeekChange'?: number | null; - - /** - * Beta da ação (sensibilidade em relação ao mercado). - */ - beta?: number | null; - - /** - * Valor Patrimonial por Ação (VPA): Patrimônio Líquido / Ações em Circulação. - */ - bookValue?: number | null; - - /** - * Dividend Yield (provento anualizado sobre o preço atual). - */ - dividendYield?: number | null; - - /** - * Crescimento percentual do lucro líquido no último ano fiscal completo em relação - * ao ano anterior. - */ - earningsAnnualGrowth?: number | null; - - /** - * Crescimento percentual do lucro líquido no último trimestre em relação ao mesmo - * trimestre do ano anterior (YoY). - */ - earningsQuarterlyGrowth?: number | null; - - /** - * Múltiplo EV/EBITDA (Enterprise Value / EBITDA TTM). - */ - enterpriseToEbitda?: number | null; - - /** - * Múltiplo EV/Receita (Enterprise Value / Receita Líquida TTM). - */ - enterpriseToRevenue?: number | null; - - /** - * Valor da Firma (Enterprise Value - EV): Market Cap + Dívida Total - Caixa. - */ - enterpriseValue?: number | null; - - /** - * Ações em livre circulação (free float). - */ - floatShares?: number | null; - - /** - * Lucro Por Ação projetado (próximo período). - */ - forwardEps?: number | null; - - /** - * Preço / Lucro Projetado (Forward P/E): Preço da Ação / LPA estimado para o - * próximo período. - */ - forwardPE?: number | null; - - /** - * Percentual de ações detidas por insiders (administradores, controladores). - */ - heldPercentInsiders?: number | null; - - /** - * Percentual de ações detidas por instituições (fundos, investidores - * institucionais). - */ - heldPercentInstitutions?: number | null; - - /** - * Ações implícitas em circulação (considerando diluição/derivativos). - */ - impliedSharesOutstanding?: number | null; - - /** - * Data de pagamento (ou 'Data Com') do último dividendo/JCP (YYYY-MM-DD). - */ - lastDividendDate?: string | null; - - /** - * Valor do último dividendo ou JCP pago por ação. - */ - lastDividendValue?: number | null; - - /** - * Data de encerramento do último ano fiscal (YYYY-MM-DD). - */ - lastFiscalYearEnd?: string | null; - - /** - * Data do último desdobramento/grupamento (timestamp UNIX em segundos). - */ - lastSplitDate?: number | null; - - /** - * Fator do último desdobramento/grupamento (ex.: 2:1, 1:10). - */ - lastSplitFactor?: string | null; - - /** - * Data de término do trimestre mais recente considerado nos cálculos (YYYY-MM-DD). - */ - mostRecentQuarter?: string | null; - - /** - * Lucro Líquido atribuível aos acionistas ordinários (controladores). - */ - netIncomeToCommon?: number | null; - - /** - * Data de encerramento do próximo ano fiscal (YYYY-MM-DD). 
- */ - nextFiscalYearEnd?: string | null; - - /** - * Índice PEG (P/E dividido pelo crescimento esperado dos lucros). - */ - pegRatio?: number | null; - - /** - * Preço sobre Valor Patrimonial (P/VP): Preço da Ação / VPA. - */ - priceToBook?: number | null; - - /** - * Margem de Lucro Líquida (Lucro Líquido / Receita Líquida). Geralmente em base - * TTM ou anual. - */ - profitMargins?: number | null; - - /** - * Variação percentual do índice S&P 500 nas últimas 52 semanas (para referência). - */ - SandP52WeekChange?: number | null; - - /** - * Número total de ações ordinárias em circulação. - */ - sharesOutstanding?: number | null; - - /** - * Ticker do ativo ao qual as estatísticas se referem. - */ - symbol?: string; - - /** - * Valor total dos ativos registrado no último balanço (anual ou trimestral). - */ - totalAssets?: number | null; - - /** - * Lucro Por Ação (LPA) dos Últimos 12 Meses (TTM). - */ - trailingEps?: number | null; - - /** - * Periodicidade dos dados: `yearly` (anual), `quarterly` (trimestral), `ttm` - * (Trailing Twelve Months - últimos 12 meses). - */ - type?: 'yearly' | 'quarterly' | 'ttm'; - - /** - * Data da última atualização deste registro específico na fonte de dados - * (YYYY-MM-DD). - */ - updatedAt?: string | null; - - /** - * Retorno percentual do preço da ação desde o início do ano atual (Year-to-Date). - */ - ytdReturn?: number | null; -} - -/** - * Representa um conjunto de dados e indicadores financeiros calculados para um - * período (TTM, anual ou trimestral). - */ -export interface FinancialDataEntry { - /** - * Preço atual da ação (pode ser ligeiramente defasado). - */ - currentPrice?: number | null; - - /** - * Índice de Liquidez Corrente (Ativo Circulante / Passivo Circulante). - */ - currentRatio?: number | null; - - /** - * Índice Dívida Líquida / Patrimônio Líquido. - */ - debtToEquity?: number | null; - - /** - * Crescimento do Lucro Líquido (geralmente trimestral YoY, como - * `earningsQuarterlyGrowth`). - */ - earningsGrowth?: number | null; - - /** - * Lucro Antes de Juros, Impostos, Depreciação e Amortização (LAJIDA ou EBITDA). - * Geralmente TTM. - */ - ebitda?: number | null; - - /** - * Margem EBITDA (EBITDA TTM / Receita Líquida TTM). - */ - ebitdaMargins?: number | null; - - /** - * Moeda na qual os dados financeiros são reportados (ex: `BRL`, `USD`). - */ - financialCurrency?: string | null; - - /** - * Fluxo de Caixa Livre (FCO - CAPEX) - (geralmente TTM). - */ - freeCashflow?: number | null; - - /** - * Margem Bruta (Lucro Bruto TTM / Receita Líquida TTM). - */ - grossMargins?: number | null; - - /** - * Lucro Bruto (geralmente TTM). - */ - grossProfits?: number | null; - - /** - * Número de opiniões de analistas consideradas. - */ - numberOfAnalystOpinions?: number | null; - - /** - * Fluxo de Caixa das Operações (FCO) - (geralmente TTM). - */ - operatingCashflow?: number | null; - - /** - * Margem Operacional (EBIT TTM / Receita Líquida TTM). - */ - operatingMargins?: number | null; - - /** - * Margem Líquida (Lucro Líquido TTM / Receita Líquida TTM). Sinônimo do campo de - * mesmo nome em `DefaultKeyStatisticsEntry`. - */ - profitMargins?: number | null; - - /** - * Índice de Liquidez Seca ((Ativo Circulante - Estoques) / Passivo Circulante). - */ - quickRatio?: number | null; - - /** - * Resumo da recomendação (ex.: strong_buy, buy, hold, sell, strong_sell). - */ - recommendationKey?: string | null; - - /** - * Média de recomendações dos analistas (1=Compra Forte, 5=Venda Forte). 
- */ - recommendationMean?: number | null; - - /** - * Retorno sobre Ativos (ROA): Lucro Líquido TTM / Ativo Total Médio. - */ - returnOnAssets?: number | null; - - /** - * Retorno sobre Patrimônio Líquido (ROE): Lucro Líquido TTM / Patrimônio Líquido - * Médio. - */ - returnOnEquity?: number | null; - - /** - * Crescimento da Receita Líquida (geralmente trimestral YoY). - */ - revenueGrowth?: number | null; - - /** - * Receita Líquida por Ação (Receita Líquida TTM / Ações em Circulação). - */ - revenuePerShare?: number | null; - - /** - * Ticker do ativo ao qual os dados se referem. - */ - symbol?: string; - - /** - * Preço-alvo mais alto estimado por analistas. - */ - targetHighPrice?: number | null; - - /** - * Preço-alvo mais baixo estimado por analistas. - */ - targetLowPrice?: number | null; - - /** - * Preço-alvo médio estimado por analistas. - */ - targetMeanPrice?: number | null; - - /** - * Preço-alvo mediano estimado por analistas. - */ - targetMedianPrice?: number | null; - - /** - * Caixa e Equivalentes de Caixa + Aplicações Financeiras de Curto Prazo (último - * balanço). - */ - totalCash?: number | null; - - /** - * Caixa Total por Ação (Caixa Total / Ações em Circulação). - */ - totalCashPerShare?: number | null; - - /** - * Dívida Bruta Total (Dívida de Curto Prazo + Dívida de Longo Prazo - último - * balanço). - */ - totalDebt?: number | null; - - /** - * Receita Líquida Total (geralmente TTM). - */ - totalRevenue?: number | null; - - /** - * Periodicidade dos dados: `yearly` (anual), `quarterly` (trimestral), `ttm` - * (Trailing Twelve Months). - */ - type?: 'yearly' | 'quarterly' | 'ttm'; - - /** - * Data da última atualização deste registro específico na fonte de dados - * (YYYY-MM-DD). - */ - updatedAt?: string; -} - -/** - * Representa os dados de uma Demonstração do Resultado do Exercício (DRE) para um - * período específico (anual ou trimestral). - */ -export interface IncomeStatementEntry { - /** - * Identificador único deste registro de DRE (interno). - */ - id?: string; - - /** - * Despesas Administrativas (detalhamento, pode estar contido em SG&A). - */ - administrativeCosts?: number | null; - - /** - * Lucro Básico por Ação Ordinária (ON). - */ - basicEarningsPerCommonShare?: number | null; - - /** - * Lucro Básico por Ação Preferencial (PN). - */ - basicEarningsPerPreferredShare?: number | null; - - /** - * Lucro Básico por Ação (LPA Básico) - Geral. - */ - basicEarningsPerShare?: number | null; - - /** - * Resultado de Operações de Capitalização (específico para Seguradoras). - */ - capitalizationOperations?: number | null; - - /** - * Custos com Sinistros e Operações (específico para Seguradoras). - */ - claimsAndOperationsCosts?: number | null; - - /** - * Resultado de Operações de Previdência Complementar (específico para - * Seguradoras/Previdência). - */ - complementaryPensionOperations?: number | null; - - /** - * Custo dos Produtos Vendidos (CPV) ou Custo dos Serviços Prestados (CSP). - */ - costOfRevenue?: number | null; - - /** - * Imposto de Renda e Contribuição Social Correntes. - */ - currentTaxes?: number | null; - - /** - * Imposto de Renda e Contribuição Social Diferidos. - */ - deferredTaxes?: number | null; - - /** - * Lucro Diluído por Ação Ordinária (ON). - */ - dilutedEarningsPerCommonShare?: number | null; - - /** - * Lucro Diluído por Ação Preferencial (PN). - */ - dilutedEarningsPerPreferredShare?: number | null; - - /** - * Lucro Diluído por Ação (LPA Diluído) - Geral. 
- */ - dilutedEarningsPerShare?: number | null; - - /** - * Resultado Líquido das Operações Descontinuadas. - */ - discontinuedOperations?: number | null; - - /** - * Lucro por Ação (LPA) - Geral (pode ser básico ou diluído, verificar contexto). - */ - earningsPerShare?: number | null; - - /** - * Lucro Antes dos Juros e Impostos (LAJIR ou EBIT). Geralmente igual a - * `operatingIncome`. - */ - ebit?: number | null; - - /** - * Efeito de Mudanças Contábeis. - */ - effectOfAccountingCharges?: number | null; - - /** - * Data de término do período fiscal ao qual a DRE se refere (YYYY-MM-DD). - */ - endDate?: string; - - /** - * Resultado de Equivalência Patrimonial. - */ - equityIncomeResult?: number | null; - - /** - * Itens Extraordinários. - */ - extraordinaryItems?: number | null; - - /** - * Despesas Financeiras (valor positivo aqui, diferente de `interestExpense`). - */ - financialExpenses?: number | null; - - /** - * Receitas Financeiras. - */ - financialIncome?: number | null; - - /** - * Resultado Financeiro Líquido. - */ - financialResult?: number | null; - - /** - * Lucro Bruto (Receita Líquida - CPV/CSP). - */ - grossProfit?: number | null; - - /** - * Resultado Antes das Participações Estatutárias. - */ - incomeBeforeStatutoryParticipationsAndContributions?: number | null; - - /** - * Lucro Antes do Imposto de Renda e Contribuição Social (LAIR). EBIT + Resultado - * Financeiro. - */ - incomeBeforeTax?: number | null; - - /** - * Imposto de Renda e Contribuição Social sobre o Lucro. - */ - incomeTaxExpense?: number | null; - - /** - * Resultado de Operações de Seguros (específico para Seguradoras). - */ - insuranceOperations?: number | null; - - /** - * Despesas Financeiras (Juros pagos). Note que este campo é negativo. - */ - interestExpense?: number | null; - - /** - * Perdas por Não Recuperabilidade de Ativos (Impairment). - */ - lossesDueToNonRecoverabilityOfAssets?: number | null; - - /** - * Participação de Acionistas Não Controladores (no Lucro Líquido). - */ - minorityInterest?: number | null; - - /** - * Lucro Líquido Consolidado do Período. - */ - netIncome?: number | null; - - /** - * Lucro Líquido Atribuível aos Acionistas Controladores (Ações Ordinárias). - */ - netIncomeApplicableToCommonShares?: number | null; - - /** - * Lucro Líquido das Operações Continuadas. - */ - netIncomeFromContinuingOps?: number | null; - - /** - * Itens Não Recorrentes (pode incluir outras despesas/receitas operacionais). - */ - nonRecurring?: number | null; - - /** - * Lucro Operacional (EBIT - Earnings Before Interest and Taxes). Lucro Bruto - - * Despesas Operacionais. - */ - operatingIncome?: number | null; - - /** - * Outros Itens. - */ - otherItems?: number | null; - - /** - * Outras Despesas Operacionais. - */ - otherOperatingExpenses?: number | null; - - /** - * Outras Receitas Operacionais (detalhamento). - */ - otherOperatingIncome?: number | null; - - /** - * Outras Receitas e Despesas Operacionais (agregado). - */ - otherOperatingIncomeAndExpenses?: number | null; - - /** - * Participações nos Lucros e Contribuições Estatutárias. - */ - profitSharingAndStatutoryContributions?: number | null; - - /** - * Resultado de Operações de Resseguros (específico para Seguradoras). - */ - reinsuranceOperations?: number | null; - - /** - * Despesas com Pesquisa e Desenvolvimento. - */ - researchDevelopment?: number | null; - - /** - * Despesas com Vendas (detalhamento, pode estar contido em SG&A). 
- */ - salesExpenses?: number | null; - - /** - * Despesas com Vendas, Gerais e Administrativas. - */ - sellingGeneralAdministrative?: number | null; - - /** - * Ticker do ativo ao qual a DRE se refere. - */ - symbol?: string; - - /** - * Total das Despesas Operacionais (P&D + SG&A + Outras). - */ - totalOperatingExpenses?: number | null; - - /** - * Resultado Financeiro Líquido + Outras Receitas/Despesas. - */ - totalOtherIncomeExpenseNet?: number | null; - - /** - * Receita Operacional Líquida. - */ - totalRevenue?: number | null; - - /** - * Indica a periodicidade da DRE: `yearly` (anual) ou `quarterly` (trimestral). - */ - type?: 'yearly' | 'quarterly'; - - /** - * Data da última atualização deste registro específico na fonte de dados - * (YYYY-MM-DD). - */ - updatedAt?: string | null; -} - -/** - * Representa os dados de uma Demonstração do Valor Adicionado (DVA) para um - * período específico (anual ou trimestral). A DVA mostra como a riqueza gerada - * pela empresa foi distribuída. - */ -export interface ValueAddedEntry { - /** - * Valor Adicionado Recebido em Transferência (Resultado de Equivalência - * Patrimonial, Receitas Financeiras, etc.). Item 6 da DVA. - */ - addedValueReceivedByTransfer?: number | null; - - /** - * Valor Adicionado Recebido em Transferência (sinônimo de - * `addedValueReceivedByTransfer`). - */ - addedValueReceivedOnTransfer?: number | null; - - /** - * Valor Adicionado Total a Distribuir (Líquido Produzido + Recebido em - * Transferência). Item 7 da DVA. - */ - addedValueToDistribute?: number | null; - - /** - * Sinistros Retidos e Benefícios. - */ - claimsAndBenefits?: number | null; - - /** - * Receita com Operações de Previdência Complementar. - */ - complementaryPensionOperationsRevenue?: number | null; - - /** - * Construção de Ativos Próprios. - */ - constructionOfOwnAssets?: number | null; - - /** - * Custos dos Produtos, Mercadorias e Serviços Vendidos (detalhamento). - */ - costsWithProductsSold?: number | null; - - /** - * Depreciação e Amortização. - */ - depreciationAndAmortization?: number | null; - - /** - * Distribuição do Valor Adicionado (Soma dos itens seguintes). Item 8 da DVA. - */ - distributionOfAddedValue?: number | null; - - /** - * Dividendos Distribuídos. - */ - dividends?: number | null; - - /** - * Data de término do período fiscal ao qual a DVA se refere (YYYY-MM-DD). - */ - endDate?: string; - - /** - * Resultado de Equivalência Patrimonial (como receita na DVA). - */ - equityIncomeResult?: number | null; - - /** - * Remuneração de Capitais Próprios (JCP, Dividendos, Lucros Retidos). - */ - equityRemuneration?: number | null; - - /** - * Impostos Federais (IRPJ, CSLL, PIS, COFINS, IPI). - */ - federalTaxes?: number | null; - - /** - * Receita com Taxas e Comissões. - */ - feesRevenue?: number | null; - - /** - * Receitas Financeiras (como valor recebido em transferência). - */ - financialIncome?: number | null; - - /** - * Despesas de Intermediação Financeira (específico para bancos). - */ - financialIntermediationExpenses?: number | null; - - /** - * Receita de Intermediação Financeira (específico para bancos). - */ - financialIntermediationRevenue?: number | null; - - /** - * Valor Adicionado Bruto (Receitas - Insumos). Item 3 da DVA. - */ - grossAddedValue?: number | null; - - /** - * Receita com Operações de Seguros (específico para Seguradoras). - */ - insuranceOperationsRevenue?: number | null; - - /** - * Variações de Operações de Seguros. 
- */ - insuranceOperationsVariations?: number | null; - - /** - * Juros sobre o Capital Próprio (JCP). - */ - interestOnOwnEquity?: number | null; - - /** - * Perda/Recuperação de Valores de Ativos (Impairment - como custo/receita). - */ - lossOrRecoveryOfAssets?: number | null; - - /** - * Perda / Recuperação de Valores de Ativos (Impairment). - */ - lossOrRecoveryOfAssetValues?: number | null; - - /** - * Custos com Materiais, Energia, Serviços de Terceiros e Outros. - */ - materialsEnergyAndOthers?: number | null; - - /** - * Impostos Municipais (ISS). - */ - municipalTaxes?: number | null; - - /** - * Valor Adicionado Líquido Produzido pela Entidade (Bruto - Retenções). Item 5 da - * DVA. - */ - netAddedValue?: number | null; - - /** - * Valor Adicionado Líquido Produzido (sinônimo de `netAddedValue`). - */ - netAddedValueProduced?: number | null; - - /** - * Receita Operacional Líquida (detalhamento). - */ - netOperatingRevenue?: number | null; - - /** - * Participação dos Não Controladores nos Lucros Retidos. - */ - nonControllingShareOfRetainedEarnings?: number | null; - - /** - * Outras Distribuições. - */ - otherDistributions?: number | null; - - /** - * Outras Retenções (Exaustão, etc.). - */ - otherRetentions?: number | null; - - /** - * Outras Receitas. - */ - otherRevenues?: number | null; - - /** - * Outros Insumos. - */ - otherSupplies?: number | null; - - /** - * Outros Valores Recebidos (Receitas Financeiras, Aluguéis, etc.). - */ - otherValuesReceivedByTransfer?: number | null; - - /** - * Outras Variações. - */ - otherVariations?: number | null; - - /** - * Remuneração de Capitais Próprios (sinônimo de `equityRemuneration`). - */ - ownEquityRemuneration?: number | null; - - /** - * Variações de Operações de Previdência. - */ - pensionOperationsVariations?: number | null; - - /** - * Venda de Produtos e Serviços (detalhamento). - */ - productSales?: number | null; - - /** - * Provisão/Reversão para Créditos de Liquidação Duvidosa (PCLD - como - * receita/despesa na DVA). - */ - provisionOrReversalOfDoubtfulAccounts?: number | null; - - /** - * Provisão/Reversão de Perdas com Risco de Crédito (PCLD). - */ - provisionOrReversalOfExpectedCreditRiskLosses?: number | null; - - /** - * Remuneração de Capitais de Terceiros (Juros, Aluguéis). - */ - remunerationOfThirdPartyCapitals?: number | null; - - /** - * Resultado de Operações de Cosseguros Cedidos. - */ - resultOfCoinsuranceOperationsAssigned?: number | null; - - /** - * Resultados de Operações de Resseguros Cedidos. - */ - resultsOfCededReinsuranceOperations?: number | null; - - /** - * Lucros Retidos ou Prejuízo do Exercício. - */ - retainedEarningsOrLoss?: number | null; - - /** - * Retenções (Depreciação, Amortização e Exaustão). Item 4 da DVA. - */ - retentions?: number | null; - - /** - * Receitas (Venda de Mercadorias, Produtos e Serviços, etc.). Item 1 da DVA. - */ - revenue?: number | null; - - /** - * Receita da Prestação de Serviços (detalhamento). - */ - revenueFromTheProvisionOfServices?: number | null; - - /** - * Serviços de Terceiros (detalhamento). - */ - services?: number | null; - - /** - * Impostos Estaduais (ICMS). - */ - stateTaxes?: number | null; - - /** - * Insumos Adquiridos de Terceiros (Custo de Mercadorias, Matérias-Primas). Item 2 - * da DVA. - */ - suppliesPurchasedFromThirdParties?: number | null; - - /** - * Ticker do ativo ao qual a DVA se refere. - */ - symbol?: string; - - /** - * Impostos, Taxas e Contribuições (Federais, Estaduais, Municipais). 
- */ - taxes?: number | null; - - /** - * Pessoal e Encargos (Salários, Benefícios, FGTS). - */ - teamRemuneration?: number | null; - - /** - * Materiais, Energia, Serviços de Terceiros. - */ - thirdPartyMaterialsAndServices?: number | null; - - /** - * Valor Adicionado Total a Distribuir (sinônimo de `addedValueToDistribute`). - */ - totalAddedValueToDistribute?: number | null; - - /** - * Indica a periodicidade da DVA: `yearly` (anual) ou `quarterly` (trimestral). - */ - type?: 'yearly' | 'quarterly'; - - /** - * Data da última atualização deste registro específico na fonte de dados - * (YYYY-MM-DD). - */ - updatedAt?: string; - - /** - * Variação nas Despesas de Comercialização Diferidas. - */ - variationInDeferredSellingExpenses?: number | null; - - /** - * Variações das Provisões Técnicas (específico para Seguradoras). - */ - variationsOfTechnicalProvisions?: number | null; -} - -/** - * Resposta principal do endpoint `/api/quote/{tickers}`. - */ -export interface QuoteRetrieveResponse { - /** - * Timestamp indicando quando a requisição foi recebida pelo servidor. Formato - * ISO 8601. - */ - requestedAt?: string; - - /** - * Array contendo os resultados detalhados para cada ticker solicitado. - */ - results?: Array; - - /** - * Tempo aproximado que o servidor levou para processar a requisição, em formato de - * string (ex: `746ms`). - */ - took?: string; -} - -export namespace QuoteRetrieveResponse { - /** - * Contém os dados detalhados de um ativo específico retornado pelo endpoint - * `/api/quote/{tickers}`. - */ - export interface Result { - /** - * Média do volume financeiro diário negociado nos últimos 10 dias. - */ - averageDailyVolume10Day?: number | null; - - /** - * Média do volume financeiro diário negociado nos últimos 3 meses. - */ - averageDailyVolume3Month?: number | null; - - /** - * Histórico **anual** do Balanço Patrimonial. Retornado apenas se `modules` - * incluir `balanceSheetHistory`. - */ - balanceSheetHistory?: Array | null; - - /** - * Histórico **trimestral** do Balanço Patrimonial. Retornado apenas se `modules` - * incluir `balanceSheetHistoryQuarterly`. - */ - balanceSheetHistoryQuarterly?: Array | null; - - /** - * Histórico **anual** da Demonstração do Fluxo de Caixa (DFC). Retornado apenas se - * `modules` incluir `cashflowHistory`. - */ - cashflowHistory?: Array | null; - - /** - * Histórico **trimestral** da Demonstração do Fluxo de Caixa (DFC). Retornado - * apenas se `modules` incluir `cashflowHistoryQuarterly`. - */ - cashflowHistoryQuarterly?: Array | null; - - /** - * Moeda na qual os valores monetários são expressos (geralmente `BRL`). - */ - currency?: string; - - /** - * Principais estatísticas financeiras atuais/TTM. Retornado apenas se `modules` - * incluir `defaultKeyStatistics`. - */ - defaultKeyStatistics?: QuoteAPI.DefaultKeyStatisticsEntry | null; - - /** - * Histórico **anual** das principais estatísticas. Retornado apenas se `modules` - * incluir `defaultKeyStatisticsHistory`. - */ - defaultKeyStatisticsHistory?: Array | null; - - /** - * Histórico **trimestral** das principais estatísticas. Retornado apenas se - * `modules` incluir `defaultKeyStatisticsHistoryQuarterly`. - */ - defaultKeyStatisticsHistoryQuarterly?: Array | null; - - /** - * Objeto contendo informações sobre dividendos, JCP e outros eventos corporativos. - * Retornado apenas se `dividends=true` for especificado na requisição. - */ - dividendsData?: Result.DividendsData | null; - - /** - * Lucro Por Ação (LPA) dos últimos 12 meses (TTM). 
Retornado se - * `fundamental=true`. - */ - earningsPerShare?: number | null; - - /** - * Preço máximo atingido nas últimas 52 semanas. - */ - fiftyTwoWeekHigh?: number | null; - - /** - * Variação absoluta entre o preço atual e o preço máximo das últimas 52 semanas. - */ - fiftyTwoWeekHighChange?: number | null; - - /** - * Variação percentual entre o preço atual e o preço máximo das últimas 52 semanas. - */ - fiftyTwoWeekHighChangePercent?: number | null; - - /** - * Preço mínimo atingido nas últimas 52 semanas. - */ - fiftyTwoWeekLow?: number | null; - - /** - * Variação absoluta entre o preço atual e o preço mínimo das últimas 52 semanas. - */ - fiftyTwoWeekLowChange?: number | null; - - /** - * String formatada mostrando o intervalo de preço das últimas 52 semanas (Mínimo - - * Máximo). - */ - fiftyTwoWeekRange?: string | null; - - /** - * Dados financeiros e indicadores TTM. Retornado apenas se `modules` incluir - * `financialData`. - */ - financialData?: QuoteAPI.FinancialDataEntry | null; - - /** - * Histórico **anual** de dados financeiros e indicadores. Retornado apenas se - * `modules` incluir `financialDataHistory`. - */ - financialDataHistory?: Array | null; - - /** - * Histórico **trimestral** de dados financeiros e indicadores. Retornado apenas se - * `modules` incluir `financialDataHistoryQuarterly`. - */ - financialDataHistoryQuarterly?: Array | null; - - /** - * Array contendo a série histórica de preços, retornado apenas se os parâmetros - * `range` e/ou `interval` forem especificados na requisição. - */ - historicalDataPrice?: Array | null; - - /** - * Histórico **anual** da Demonstração do Resultado (DRE). Retornado apenas se - * `modules` incluir `incomeStatementHistory`. - */ - incomeStatementHistory?: Array | null; - - /** - * Histórico **trimestral** da Demonstração do Resultado (DRE). Retornado apenas se - * `modules` incluir `incomeStatementHistoryQuarterly`. - */ - incomeStatementHistoryQuarterly?: Array | null; - - /** - * URL da imagem do logo do ativo/empresa. - */ - logourl?: string; - - /** - * Nome longo ou completo da empresa ou ativo. - */ - longName?: string | null; - - /** - * Capitalização de mercado total do ativo (Preço Atual x Ações em Circulação). - */ - marketCap?: number | null; - - /** - * Indicador Preço/Lucro (P/L): Preço Atual / Lucro Por Ação (LPA) TTM. Retornado - * se `fundamental=true`. - */ - priceEarnings?: number | null; - - /** - * Variação absoluta do preço no dia atual em relação ao fechamento anterior. - */ - regularMarketChange?: number | null; - - /** - * Variação percentual do preço no dia atual em relação ao fechamento anterior. - */ - regularMarketChangePercent?: number | null; - - /** - * Preço máximo atingido no dia de negociação atual. - */ - regularMarketDayHigh?: number | null; - - /** - * Preço mínimo atingido no dia de negociação atual. - */ - regularMarketDayLow?: number | null; - - /** - * String formatada mostrando o intervalo de preço do dia (Mínimo - Máximo). - */ - regularMarketDayRange?: string | null; - - /** - * Preço de abertura no dia de negociação atual. - */ - regularMarketOpen?: number | null; - - /** - * Preço de fechamento do pregão anterior. - */ - regularMarketPreviousClose?: number | null; - - /** - * Preço atual ou do último negócio registrado. - */ - regularMarketPrice?: number | null; - - /** - * Data e hora da última atualização da cotação (último negócio registrado). - * Formato ISO 8601. - */ - regularMarketTime?: string | null; - - /** - * Volume financeiro negociado no dia atual. 
- */ - regularMarketVolume?: number | null; - - /** - * Nome curto ou abreviado da empresa ou ativo. - */ - shortName?: string | null; - - /** - * Resumo do perfil da empresa. Retornado apenas se `modules` incluir - * `summaryProfile`. - */ - summaryProfile?: Result.SummaryProfile | null; - - /** - * Ticker (símbolo) do ativo (ex: `PETR4`, `^BVSP`). - */ - symbol?: string; - - /** - * Média móvel simples dos preços de fechamento dos últimos 200 dias. - */ - twoHundredDayAverage?: number | null; - - /** - * Variação absoluta entre o preço atual e a média de 200 dias. - */ - twoHundredDayAverageChange?: number | null; - - /** - * Variação percentual entre o preço atual e a média de 200 dias. - */ - twoHundredDayAverageChangePercent?: number | null; - - /** - * Timestamp da última atualização dos dados do índice na fonte (aplicável - * principalmente a índices, como `^BVSP`). Formato ISO 8601. - */ - updatedAt?: string | null; - - /** - * O intervalo (`interval`) efetivamente utilizado pela API para retornar os dados - * históricos, caso solicitado. - */ - usedInterval?: string | null; - - /** - * O período (`range`) efetivamente utilizado pela API para retornar os dados - * históricos, caso solicitado. - */ - usedRange?: string | null; - - /** - * Lista dos valores válidos que podem ser utilizados no parâmetro `interval` para - * este ativo específico. - */ - validIntervals?: Array; - - /** - * Lista dos valores válidos que podem ser utilizados no parâmetro `range` para - * este ativo específico. - */ - validRanges?: Array; - - /** - * Histórico **anual** da Demonstração do Valor Adicionado (DVA). Retornado apenas - * se `modules` incluir `valueAddedHistory`. - */ - valueAddedHistory?: Array | null; - - /** - * Histórico **trimestral** da Demonstração do Valor Adicionado (DVA). Retornado - * apenas se `modules` incluir `valueAddedHistoryQuarterly`. - */ - valueAddedHistoryQuarterly?: Array | null; - } - - export namespace Result { - /** - * Objeto contendo informações sobre dividendos, JCP e outros eventos corporativos. - * Retornado apenas se `dividends=true` for especificado na requisição. - */ - export interface DividendsData { - /** - * Lista de proventos pagos em dinheiro (Dividendos e JCP). - */ - cashDividends?: Array; - - /** - * Lista de eventos corporativos (Desdobramento, Grupamento, Bonificação). - */ - stockDividends?: Array; - - /** - * Lista de eventos de subscrição de ações (estrutura não detalhada aqui). - */ - subscriptions?: Array; - } - - export namespace DividendsData { - /** - * Detalhes sobre um pagamento de provento em dinheiro (Dividendo ou JCP). - */ - export interface CashDividend { - /** - * Data em que o pagamento do provento foi aprovado pela empresa. Pode ser uma - * estimativa em alguns casos. Formato ISO 8601. - */ - approvedOn?: string; - - /** - * Ticker do ativo que pagou o provento (ex: `ITSA4`). Pode incluir sufixos - * específicos relacionados ao evento. - */ - assetIssued?: string; - - /** - * Código ISIN (International Securities Identification Number) do ativo - * relacionado ao provento. - */ - isinCode?: string | null; - - /** - * Tipo do provento em dinheiro. Geralmente `DIVIDENDO` ou `JCP` (Juros sobre - * Capital Próprio). - */ - label?: string; - - /** - * Data Com (Ex-Date). Último dia em que era necessário possuir o ativo para ter - * direito a receber este provento. Pode ser uma estimativa. Formato ISO 8601. - */ - lastDatePrior?: string; - - /** - * Data efetiva em que o pagamento foi realizado (ou está previsto). Formato - * ISO 8601. 
- */ - paymentDate?: string | null; - - /** - * Valor bruto do provento pago por unidade do ativo (por ação, por cota). - */ - rate?: number; - - /** - * Descrição do período ou evento ao qual o provento se refere (ex: - * `1º Trimestre/2023`, `Resultado 2022`). - */ - relatedTo?: string | null; - - /** - * Observações adicionais ou informações relevantes sobre o provento. - */ - remarks?: string | null; - } - - /** - * Detalhes sobre um evento corporativo que afeta a quantidade de ações - * (Desdobramento/Split, Grupamento/Inplit, Bonificação). - */ - export interface StockDividend { - /** - * Data em que o evento foi aprovado. Formato ISO 8601. - */ - approvedOn?: string; - - /** - * Ticker do ativo afetado pelo evento. - */ - assetIssued?: string; - - /** - * Descrição textual do fator (ex: `1 / 10`, `10 / 1`). - */ - completeFactor?: string; - - /** - * Fator numérico do evento. - * - * - **Bonificação:** Percentual (ex: 0.1 para 10%). - * - **Desdobramento/Grupamento:** Fator multiplicativo ou divisor. - */ - factor?: number; - - /** - * Código ISIN do ativo. - */ - isinCode?: string | null; - - /** - * Tipo do evento: `DESDOBRAMENTO`, `GRUPAMENTO`, `BONIFICACAO`. - */ - label?: string; - - /** - * Data Com (Ex-Date). Último dia para possuir o ativo nas condições antigas. - * Formato ISO 8601. - */ - lastDatePrior?: string; - - /** - * Observações adicionais sobre o evento. - */ - remarks?: string | null; - } - } - - /** - * Representa um ponto na série histórica de preços de um ativo. - */ - export interface HistoricalDataPrice { - /** - * Preço de fechamento ajustado para proventos (dividendos, JCP, bonificações, - * etc.) e desdobramentos/grupamentos. - */ - adjustedClose?: number; - - /** - * Preço de fechamento do ativo no intervalo. - */ - close?: number; - - /** - * Data do pregão ou do ponto de dados, representada como um timestamp UNIX (número - * de segundos desde 1970-01-01 UTC). - */ - date?: number; - - /** - * Preço máximo atingido pelo ativo no intervalo. - */ - high?: number; - - /** - * Preço mínimo atingido pelo ativo no intervalo. - */ - low?: number; - - /** - * Preço de abertura do ativo no intervalo (dia, semana, mês, etc.). - */ - open?: number; - - /** - * Volume financeiro negociado no intervalo. - */ - volume?: number; - } - - /** - * Resumo do perfil da empresa. Retornado apenas se `modules` incluir - * `summaryProfile`. - */ - export interface SummaryProfile { - /** - * Linha 1 do endereço da sede da empresa. - */ - address1?: string | null; - - /** - * Linha 2 do endereço da sede da empresa (complemento). - */ - address2?: string | null; - - /** - * Cidade da sede da empresa. - */ - city?: string | null; - - /** - * Lista de diretores e executivos principais da empresa (estrutura interna do - * objeto não detalhada aqui). - */ - companyOfficers?: Array | null; - - /** - * País da sede da empresa. - */ - country?: string | null; - - /** - * Número estimado de funcionários em tempo integral. - */ - fullTimeEmployees?: number | null; - - /** - * Nome da indústria em que a empresa atua. - */ - industry?: string | null; - - /** - * Nome de exibição formatado para a indústria. - */ - industryDisp?: string | null; - - /** - * Chave interna ou código para a indústria. - */ - industryKey?: string | null; - - /** - * Descrição longa e detalhada sobre as atividades e o negócio da empresa. - */ - longBusinessSummary?: string | null; - - /** - * Número de telefone principal da empresa. - */ - phone?: string | null; - - /** - * Nome do setor de atuação da empresa. 
- */ - sector?: string | null; - - /** - * Nome de exibição formatado para o setor. - */ - sectorDisp?: string | null; - - /** - * Chave interna ou código para o setor. - */ - sectorKey?: string | null; - - /** - * Estado ou província da sede da empresa. - */ - state?: string | null; - - /** - * URL do website oficial da empresa. - */ - website?: string | null; - - /** - * Código Postal (CEP) da sede da empresa. - */ - zip?: string | null; - } - } -} - -/** - * Resposta do endpoint de listagem de cotações (`/api/quote/list`). - */ -export interface QuoteListResponse { - /** - * Lista de todos os setores disponíveis que podem ser usados no parâmetro de - * filtro `sector`. - */ - availableSectors?: Array; - - /** - * Lista dos tipos de ativos (`stock`, `fund`, `bdr`) disponíveis que podem ser - * usados no parâmetro de filtro `type`. - */ - availableStockTypes?: Array<'stock' | 'fund' | 'bdr'>; - - /** - * Número da página atual retornada nos resultados. - */ - currentPage?: number; - - /** - * Indica se existe uma próxima página de resultados (`true`) ou se esta é a última - * página (`false`). - */ - hasNextPage?: boolean; - - /** - * Lista resumida de índices relevantes (geralmente inclui IBOVESPA). - */ - indexes?: Array; - - /** - * Número de itens (ativos) retornados por página (conforme `limit` ou padrão). - */ - itemsPerPage?: number; - - /** - * Lista paginada e filtrada dos ativos solicitados. - */ - stocks?: Array; - - /** - * Número total de ativos encontrados que correspondem aos filtros aplicados (sem - * considerar a paginação). - */ - totalCount?: number; - - /** - * Número total de páginas existentes para a consulta/filtros aplicados. - */ - totalPages?: number; -} - -export namespace QuoteListResponse { - /** - * Resumo de informações de um índice, geralmente retornado em listas. - */ - export interface Index { - /** - * Nome do índice (ex: `IBOVESPA`). - */ - name?: string; - - /** - * Ticker do índice (ex: `^BVSP`). - */ - stock?: string; - } - - /** - * Resumo de informações de um ativo (ação, FII, BDR), geralmente retornado em - * listas. - */ - export interface Stock { - /** - * Variação percentual do preço em relação ao fechamento anterior. - */ - change?: number; - - /** - * Preço de fechamento mais recente ou último preço negociado. - */ - close?: number; - - /** - * URL para a imagem do logo da empresa/ativo. - */ - logo?: string; - - /** - * Capitalização de mercado (Preço x Quantidade de Ações). Pode ser nulo para FIIs - * ou outros tipos. - */ - market_cap?: number | null; - - /** - * Nome do ativo ou empresa (ex: `PETROBRAS PN`). - */ - name?: string; - - /** - * Setor de atuação da empresa (ex: `Energy Minerals`, `Finance`). Pode ser nulo ou - * variar para FIIs. - */ - sector?: string | null; - - /** - * Ticker do ativo (ex: `PETR4`, `MXRF11`). - */ - stock?: string; - - /** - * Tipo do ativo: `stock` (Ação), `fund` (Fundo Imobiliário/FII), `bdr` (Brazilian - * Depositary Receipt). - */ - type?: 'stock' | 'fund' | 'bdr'; - - /** - * Volume financeiro negociado no último pregão ou dia atual. - */ - volume?: number; - } -} - -export interface QuoteRetrieveParams { - /** - * **Obrigatório caso não esteja adicionado como header "Authorization".** Seu - * token de autenticação pessoal da API Brapi. - * - * **Formas de Envio:** - * - * 1. **Query Parameter:** Adicione `?token=SEU_TOKEN` ao final da URL. - * 2. **HTTP Header:** Inclua o header `Authorization: Bearer SEU_TOKEN` na sua - * requisição. 
- * - * Ambos os métodos são aceitos, mas pelo menos um deles deve ser utilizado. - * Obtenha seu token em [brapi.dev/dashboard](https://brapi.dev/dashboard). - */ - token?: string; - - /** - * **Opcional.** Booleano (`true` ou `false`). Se `true`, inclui informações sobre - * dividendos e JCP (Juros sobre Capital Próprio) pagos historicamente pelo ativo - * na chave `dividendsData`. - */ - dividends?: boolean; - - /** - * **Opcional.** Booleano (`true` ou `false`). Se `true`, inclui dados - * fundamentalistas básicos na resposta, como Preço/Lucro (P/L) e Lucro Por Ação - * (LPA). - * - * **Nota:** Para dados fundamentalistas mais completos, utilize o parâmetro - * `modules`. - */ - fundamental?: boolean; - - /** - * **Opcional.** Define a granularidade (intervalo) dos dados históricos de preço - * (`historicalDataPrice`). Requer que `range` também seja especificado. - * - * **Valores Possíveis:** - * - * - `1m`, `2m`, `5m`, `15m`, `30m`, `60m`, `90m`, `1h`: Intervalos intraday - * (minutos/horas). **Atenção:** Disponibilidade pode variar conforme o `range` e - * o ativo. - * - `1d`: Diário (padrão se `range` for especificado e `interval` omitido). - * - `5d`: 5 dias. - * - `1wk`: Semanal. - * - `1mo`: Mensal. - * - `3mo`: Trimestral. - */ - interval?: '1m' | '2m' | '5m' | '15m' | '30m' | '60m' | '90m' | '1h' | '1d' | '5d' | '1wk' | '1mo' | '3mo'; - - /** - * **Opcional.** Uma lista de módulos de dados adicionais, separados por vírgula - * (`,`), para incluir na resposta. Permite buscar dados financeiros detalhados. - * - * **Exemplos:** - * - * - `modules=summaryProfile` (retorna perfil da empresa) - * - `modules=balanceSheetHistory,incomeStatementHistory` (retorna histórico anual - * do BP e DRE) - * - * Veja a descrição principal do endpoint para a lista completa de módulos e seus - * conteúdos. - */ - modules?: Array< - | 'summaryProfile' - | 'balanceSheetHistory' - | 'defaultKeyStatistics' - | 'balanceSheetHistoryQuarterly' - | 'incomeStatementHistory' - | 'incomeStatementHistoryQuarterly' - | 'financialData' - | 'financialDataHistory' - | 'financialDataHistoryQuarterly' - | 'defaultKeyStatisticsHistory' - | 'defaultKeyStatisticsHistoryQuarterly' - | 'valueAddedHistory' - | 'valueAddedHistoryQuarterly' - | 'cashflowHistory' - | 'cashflowHistoryQuarterly' - >; - - /** - * **Opcional.** Define o período para os dados históricos de preço - * (`historicalDataPrice`). Se omitido, apenas a cotação mais recente é retornada - * (a menos que `interval` seja usado). - * - * **Valores Possíveis:** - * - * - `1d`: Último dia de pregão (intraday se `interval` for minutos/horas). - * - `5d`: Últimos 5 dias. - * - `1mo`: Último mês. - * - `3mo`: Últimos 3 meses. - * - `6mo`: Últimos 6 meses. - * - `1y`: Último ano. - * - `2y`: Últimos 2 anos. - * - `5y`: Últimos 5 anos. - * - `10y`: Últimos 10 anos. - * - `ytd`: Desde o início do ano atual (Year-to-Date). - * - `max`: Todo o período histórico disponível. - */ - range?: '1d' | '5d' | '1mo' | '3mo' | '6mo' | '1y' | '2y' | '5y' | '10y' | 'ytd' | 'max'; -} - -export interface QuoteListParams { - /** - * **Obrigatório caso não esteja adicionado como header "Authorization".** Seu - * token de autenticação pessoal da API Brapi. - * - * **Formas de Envio:** - * - * 1. **Query Parameter:** Adicione `?token=SEU_TOKEN` ao final da URL. - * 2. **HTTP Header:** Inclua o header `Authorization: Bearer SEU_TOKEN` na sua - * requisição. - * - * Ambos os métodos são aceitos, mas pelo menos um deles deve ser utilizado. 
- * Obtenha seu token em [brapi.dev/dashboard](https://brapi.dev/dashboard). - */ - token?: string; - - /** - * **Opcional.** Número máximo de ativos a serem retornados por página. O valor - * padrão pode variar. - */ - limit?: number; - - /** - * **Opcional.** Número da página dos resultados a ser retornada, considerando o - * `limit` especificado. Começa em 1. - */ - page?: number; - - /** - * **Opcional.** Termo para buscar ativos por ticker (correspondência parcial). Ex: - * `PETR` encontrará `PETR4`, `PETR3`. - */ - search?: string; - - /** - * **Opcional.** Filtra os resultados por setor de atuação da empresa. Utilize um - * dos valores retornados em `availableSectors`. - */ - sector?: - | 'Retail Trade' - | 'Energy Minerals' - | 'Health Services' - | 'Utilities' - | 'Finance' - | 'Consumer Services' - | 'Consumer Non-Durables' - | 'Non-Energy Minerals' - | 'Commercial Services' - | 'Distribution Services' - | 'Transportation' - | 'Technology Services' - | 'Process Industries' - | 'Communications' - | 'Producer Manufacturing' - | 'Miscellaneous' - | 'Electronic Technology' - | 'Industrial Services' - | 'Health Technology' - | 'Consumer Durables'; - - /** - * **Opcional.** Campo pelo qual os resultados serão ordenados. - */ - sortBy?: 'name' | 'close' | 'change' | 'change_abs' | 'volume' | 'market_cap_basic' | 'sector'; - - /** - * **Opcional.** Direção da ordenação: `asc` (ascendente) ou `desc` (descendente). - * Requer que `sortBy` seja especificado. - */ - sortOrder?: 'asc' | 'desc'; - - /** - * **Opcional.** Filtra os resultados por tipo de ativo. - */ - type?: 'stock' | 'fund' | 'bdr'; -} - -export declare namespace Quote { - export { - type BalanceSheetEntry as BalanceSheetEntry, - type CashflowEntry as CashflowEntry, - type DefaultKeyStatisticsEntry as DefaultKeyStatisticsEntry, - type FinancialDataEntry as FinancialDataEntry, - type IncomeStatementEntry as IncomeStatementEntry, - type ValueAddedEntry as ValueAddedEntry, - type QuoteRetrieveResponse as QuoteRetrieveResponse, - type QuoteListResponse as QuoteListResponse, - type QuoteRetrieveParams as QuoteRetrieveParams, - type QuoteListParams as QuoteListParams, - }; -} diff --git a/src/resources/v2.ts b/src/resources/v2.ts deleted file mode 100644 index ca56a44..0000000 --- a/src/resources/v2.ts +++ /dev/null @@ -1,3 +0,0 @@ -// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -export * from './v2/index'; diff --git a/src/resources/v2/crypto.ts b/src/resources/v2/crypto.ts deleted file mode 100644 index 2c2dbc0..0000000 --- a/src/resources/v2/crypto.ts +++ /dev/null @@ -1,344 +0,0 @@ -// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -import { APIResource } from '../../core/resource'; -import { APIPromise } from '../../core/api-promise'; -import { RequestOptions } from '../../internal/request-options'; - -export class Crypto extends APIResource { - /** - * Obtenha cotações atualizadas e dados históricos para uma ou mais criptomoedas. - * - * ### Funcionalidades: - * - * - **Cotação Múltipla:** Consulte várias criptomoedas em uma única requisição - * usando o parâmetro `coin`. - * - **Moeda de Referência:** Especifique a moeda fiduciária para a cotação com - * `currency` (padrão: BRL). - * - **Dados Históricos:** Solicite séries históricas usando `range` e `interval` - * (similar ao endpoint de ações). - * - * ### Autenticação: - * - * Requer token de autenticação via `token` (query) ou `Authorization` (header). 
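The `QuoteListParams` filters documented above translate directly to the Python `client.quote.list()` call, with the snake_case `sort_by`/`sort_order` names used in `tests/api_resources/test_quote.py`. A hedged sketch assuming a pre-configured `Brapi` client:

```python
from brapi import Brapi


def list_finance_stocks(client: Brapi, token: str):
    """One page of Finance-sector stocks, sorted by market cap, descending."""
    return client.quote.list(
        token=token,                 # or send `Authorization: Bearer <token>`
        sector="Finance",            # one of the documented sector values
        type="stock",                # stock | fund | bdr
        sort_by="market_cap_basic",
        sort_order="desc",
        limit=50,
        page=1,
    )
```

Pagination state (`currentPage`, `hasNextPage`, `totalPages`) is part of the `QuoteListResponse` schema above; the attribute names exposed by the generated Python model are not shown in this section.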
- *
- * ### Exemplo de Requisição:
- *
- * **Cotação de Bitcoin (BTC) e Ethereum (ETH) em Dólar Americano (USD):**
- *
- * ```bash
- * curl -X GET "https://brapi.dev/api/v2/crypto?coin=BTC,ETH&currency=USD&token=SEU_TOKEN"
- * ```
- *
- * **Cotação de Cardano (ADA) em Real (BRL) com histórico do último mês (intervalo
- * diário):**
- *
- * ```bash
- * curl -X GET "https://brapi.dev/api/v2/crypto?coin=ADA&currency=BRL&range=1mo&interval=1d&token=SEU_TOKEN"
- * ```
- *
- * ### Resposta:
- *
- * A resposta contém um array `coins`, onde cada objeto representa uma criptomoeda
- * solicitada, incluindo sua cotação atual, dados de mercado e, opcionalmente, a
- * série histórica (`historicalDataPrice`).
- */
- retrieve(query: CryptoRetrieveParams, options?: RequestOptions): APIPromise {
- return this._client.get('/api/v2/crypto', { query, ...options });
- }
-
- /**
- * Obtenha a lista completa de todas as siglas (tickers) de criptomoedas que a API
- * Brapi suporta para consulta no endpoint `/api/v2/crypto`.
- *
- * ### Funcionalidade:
- *
- * - Retorna um array `coins` com as siglas.
- * - Pode ser filtrado usando o parâmetro `search`.
- *
- * ### Autenticação:
- *
- * Requer token de autenticação via `token` (query) ou `Authorization` (header).
- *
- * ### Exemplo de Requisição:
- *
- * **Listar todas as criptomoedas disponíveis:**
- *
- * ```bash
- * curl -X GET "https://brapi.dev/api/v2/crypto/available?token=SEU_TOKEN"
- * ```
- *
- * **Buscar criptomoedas cujo ticker contenha 'DOGE':**
- *
- * ```bash
- * curl -X GET "https://brapi.dev/api/v2/crypto/available?search=DOGE&token=SEU_TOKEN"
- * ```
- *
- * ### Resposta:
- *
- * A resposta é um objeto JSON com a chave `coins`, contendo um array de strings
- * com as siglas das criptomoedas (ex: `["BTC", "ETH", "LTC", "XRP"]`).
- */
- listAvailable(
- query: CryptoListAvailableParams | null | undefined = {},
- options?: RequestOptions,
- ): APIPromise {
- return this._client.get('/api/v2/crypto/available', { query, ...options });
- }
-}
-
-/**
- * Resposta principal do endpoint `/api/v2/crypto`.
- */
-export interface CryptoRetrieveResponse {
- /**
- * Array contendo os resultados detalhados para cada criptomoeda solicitada.
- */
- coins?: Array;
-}
-
-export namespace CryptoRetrieveResponse {
- /**
- * Contém os dados detalhados de uma criptomoeda específica retornada pelo endpoint
- * `/api/v2/crypto`.
- */
- export interface Coin {
- /**
- * Sigla (ticker) da criptomoeda (ex: `BTC`, `ETH`).
- */
- coin?: string;
-
- /**
- * URL da imagem do logo da criptomoeda.
- */
- coinImageUrl?: string;
-
- /**
- * Nome completo da criptomoeda (ex: `Bitcoin`, `Ethereum`).
- */
- coinName?: string;
-
- /**
- * Sigla da moeda fiduciária na qual os preços estão cotados (ex: `BRL`, `USD`).
- */
- currency?: string;
-
- /**
- * Taxa de câmbio da `currency` em relação ao USD (Dólar Americano).
- * `1 USD = X currency`.
- */
- currencyRateFromUSD?: number;
-
- /**
- * Array contendo a série histórica de preços, retornado se `range` ou `interval`
- * forem especificados.
- */
- historicalDataPrice?: Array | null;
-
- /**
- * Capitalização de mercado da criptomoeda na `currency` especificada.
- */
- marketCap?: number;
-
- /**
- * Variação absoluta do preço nas últimas 24 horas (ou período relevante).
- */
- regularMarketChange?: number;
-
- /**
- * Variação percentual do preço nas últimas 24 horas (ou período relevante).
- */
- regularMarketChangePercent?: number;
-
- /**
- * Preço máximo nas últimas 24 horas (ou período relevante).
- */ - regularMarketDayHigh?: number; - - /** - * Preço mínimo nas últimas 24 horas (ou período relevante). - */ - regularMarketDayLow?: number; - - /** - * String formatada mostrando o intervalo de preço das últimas 24h (Mínimo - - * Máximo). - */ - regularMarketDayRange?: string; - - /** - * Preço atual da criptomoeda na `currency` especificada. - */ - regularMarketPrice?: number; - - /** - * Timestamp da última atualização da cotação. Formato ISO 8601. - */ - regularMarketTime?: string; - - /** - * Volume negociado nas últimas 24 horas (na `currency` especificada). - */ - regularMarketVolume?: number; - - /** - * O intervalo (`interval`) efetivamente utilizado para os dados históricos, se - * solicitado. - */ - usedInterval?: string | null; - - /** - * O período (`range`) efetivamente utilizado para os dados históricos, se - * solicitado. - */ - usedRange?: string | null; - - /** - * Lista dos valores válidos para o parâmetro `interval` nesta criptomoeda. - */ - validIntervals?: Array; - - /** - * Lista dos valores válidos para o parâmetro `range` nesta criptomoeda. - */ - validRanges?: Array; - } - - export namespace Coin { - /** - * Representa um ponto na série histórica de preços de uma criptomoeda. - */ - export interface HistoricalDataPrice { - /** - * Preço de fechamento ajustado (geralmente igual ao `close` para cripto). - */ - adjustedClose?: number; - - /** - * Preço de fechamento da criptomoeda no intervalo. - */ - close?: number; - - /** - * Data do ponto de dados, representada como um timestamp UNIX. - */ - date?: number; - - /** - * Preço máximo atingido no intervalo. - */ - high?: number; - - /** - * Preço mínimo atingido no intervalo. - */ - low?: number; - - /** - * Preço de abertura da criptomoeda no intervalo. - */ - open?: number; - - /** - * Volume negociado no intervalo (na criptomoeda ou na moeda de referência, - * verificar contexto). - */ - volume?: number; - } - } -} - -/** - * Resposta do endpoint que lista todas as criptomoedas disponíveis. - */ -export interface CryptoListAvailableResponse { - /** - * Lista de siglas (tickers) das criptomoedas disponíveis (ex: `BTC`, `ETH`, - * `LTC`). - */ - coins?: Array; -} - -export interface CryptoRetrieveParams { - /** - * **Obrigatório.** Uma ou mais siglas (tickers) de criptomoedas que você deseja - * consultar. Separe múltiplas siglas por vírgula (`,`). - * - * - **Exemplos:** `BTC`, `ETH,ADA`, `SOL`. - */ - coin: string; - - /** - * **Obrigatório caso não esteja adicionado como header "Authorization".** Seu - * token de autenticação pessoal da API Brapi. - * - * **Formas de Envio:** - * - * 1. **Query Parameter:** Adicione `?token=SEU_TOKEN` ao final da URL. - * 2. **HTTP Header:** Inclua o header `Authorization: Bearer SEU_TOKEN` na sua - * requisição. - * - * Ambos os métodos são aceitos, mas pelo menos um deles deve ser utilizado. - * Obtenha seu token em [brapi.dev/dashboard](https://brapi.dev/dashboard). - */ - token?: string; - - /** - * **Opcional.** A sigla da moeda fiduciária na qual a cotação da(s) criptomoeda(s) - * deve ser retornada. Se omitido, o padrão é `BRL` (Real Brasileiro). - */ - currency?: string; - - /** - * **Opcional.** Define a granularidade (intervalo) dos dados históricos de preço - * (`historicalDataPrice`). Requer que `range` também seja especificado. Funciona - * de forma análoga ao endpoint de ações. - * - * - Valores: `1m`, `2m`, `5m`, `15m`, `30m`, `60m`, `90m`, `1h`, `1d`, `5d`, - * `1wk`, `1mo`, `3mo`. 
- */ - interval?: '1m' | '2m' | '5m' | '15m' | '30m' | '60m' | '90m' | '1h' | '1d' | '5d' | '1wk' | '1mo' | '3mo'; - - /** - * **Opcional.** Define o período para os dados históricos de preço - * (`historicalDataPrice`). Funciona de forma análoga ao endpoint de ações. Se - * omitido, apenas a cotação mais recente é retornada (a menos que `interval` seja - * usado). - * - * - Valores: `1d`, `5d`, `1mo`, `3mo`, `6mo`, `1y`, `2y`, `5y`, `10y`, `ytd`, - * `max`. - */ - range?: '1d' | '5d' | '1mo' | '3mo' | '6mo' | '1y' | '2y' | '5y' | '10y' | 'ytd' | 'max'; -} - -export interface CryptoListAvailableParams { - /** - * **Obrigatório caso não esteja adicionado como header "Authorization".** Seu - * token de autenticação pessoal da API Brapi. - * - * **Formas de Envio:** - * - * 1. **Query Parameter:** Adicione `?token=SEU_TOKEN` ao final da URL. - * 2. **HTTP Header:** Inclua o header `Authorization: Bearer SEU_TOKEN` na sua - * requisição. - * - * Ambos os métodos são aceitos, mas pelo menos um deles deve ser utilizado. - * Obtenha seu token em [brapi.dev/dashboard](https://brapi.dev/dashboard). - */ - token?: string; - - /** - * **Opcional.** Termo para filtrar a lista de siglas de criptomoedas - * (correspondência parcial, case-insensitive). Se omitido, retorna todas as - * siglas. - */ - search?: string; -} - -export declare namespace Crypto { - export { - type CryptoRetrieveResponse as CryptoRetrieveResponse, - type CryptoListAvailableResponse as CryptoListAvailableResponse, - type CryptoRetrieveParams as CryptoRetrieveParams, - type CryptoListAvailableParams as CryptoListAvailableParams, - }; -} diff --git a/src/resources/v2/currency.ts b/src/resources/v2/currency.ts deleted file mode 100644 index 4a588b1..0000000 --- a/src/resources/v2/currency.ts +++ /dev/null @@ -1,246 +0,0 @@ -// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -import { APIResource } from '../../core/resource'; -import { APIPromise } from '../../core/api-promise'; -import { RequestOptions } from '../../internal/request-options'; - -export class Currency extends APIResource { - /** - * Obtenha cotações atualizadas para um ou mais pares de moedas fiduciárias (ex: - * USD-BRL, EUR-USD). - * - * ### Funcionalidades: - * - * - **Cotação Múltipla:** Consulte vários pares de moedas em uma única requisição - * usando o parâmetro `currency`. - * - **Dados Retornados:** Inclui nome do par, preços de compra (bid) e venda - * (ask), variação, máximas e mínimas, e timestamp da atualização. - * - * ### Parâmetros: - * - * - **`currency` (Obrigatório):** Uma lista de pares de moedas separados por - * vírgula, no formato `MOEDA_ORIGEM-MOEDA_DESTINO` (ex: `USD-BRL`, `EUR-USD`). - * Consulte os pares disponíveis em - * [`/api/v2/currency/available`](#/Moedas/getAvailableCurrencies). - * - **`token` (Obrigatório):** Seu token de autenticação. - * - * ### Autenticação: - * - * Requer token de autenticação válido via `token` (query) ou `Authorization` - * (header). - */ - retrieve(query: CurrencyRetrieveParams, options?: RequestOptions): APIPromise { - return this._client.get('/api/v2/currency', { query, ...options }); - } - - /** - * Obtenha a lista completa de todas as moedas fiduciárias suportadas pela API, - * geralmente utilizadas no parâmetro `currency` de outros endpoints (como o de - * criptomoedas) ou para futuras funcionalidades de conversão. - * - * ### Funcionalidade: - * - * - Retorna um array `currencies` com os nomes das moedas. - * - Pode ser filtrado usando o parâmetro `search`. 
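A usage sketch for the `/api/v2/crypto` resource removed just above. The parameter names (`coin`, `currency`, `range`, `interval`, `token`) match those exercised in `tests/api_resources/v2/test_crypto.py` at the end of this diff; everything else in the sketch is an assumption built on that test.

```python
from brapi import Brapi


def btc_eth_in_usd(client: Brapi, token: str):
    """Latest BTC and ETH quotes in USD, plus one month of daily candles."""
    return client.v2.crypto.retrieve(
        coin="BTC,ETH",    # comma-separated tickers, as in the curl examples above
        currency="USD",    # defaults to BRL when omitted
        range="1mo",
        interval="1d",
        token=token,
    )
```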
- * - * ### Autenticação: - * - * Requer token de autenticação via `token` (query) ou `Authorization` (header). - * - * ### Exemplo de Requisição: - * - * **Listar todas as moedas disponíveis:** - * - * ```bash - * curl -X GET "https://brapi.dev/api/v2/currency/available?token=SEU_TOKEN" - * ``` - * - * **Buscar moedas cujo nome contenha 'Euro':** - * - * ```bash - * curl -X GET "https://brapi.dev/api/v2/currency/available?search=Euro&token=SEU_TOKEN" - * ``` - * - * ### Resposta: - * - * A resposta é um objeto JSON com a chave `currencies`, contendo um array de - * objetos. Cada objeto possui uma chave `currency` com o nome completo da moeda - * (ex: `"Dólar Americano/Real Brasileiro"`). **Nota:** O formato do nome pode - * indicar um par de moedas, dependendo do contexto interno da API. - */ - listAvailable( - query: CurrencyListAvailableParams | null | undefined = {}, - options?: RequestOptions, - ): APIPromise { - return this._client.get('/api/v2/currency/available', { query, ...options }); - } -} - -/** - * Estrutura da **resposta principal** do endpoint `GET /api/v2/currency`. - */ -export interface CurrencyRetrieveResponse { - /** - * Array contendo os objetos `CurrencyQuote`, um para cada par de moeda válido - * solicitado no parâmetro `currency`. - */ - currency: Array; -} - -export namespace CurrencyRetrieveResponse { - /** - * Contém os dados detalhados da cotação de um **par de moedas fiduciárias - * específico**, retornado como um elemento do array `currency` no endpoint - * `/api/v2/currency`. - */ - export interface Currency { - /** - * **Preço de Venda (Ask):** Preço atual pelo qual o mercado está disposto a vender - * a moeda de origem (`fromCurrency`) recebendo a moeda de destino (`toCurrency`). - * Formato String. - */ - askPrice: string; - - /** - * **Preço de Compra (Bid):** Preço atual pelo qual o mercado está disposto a - * comprar a moeda de origem (`fromCurrency`) pagando com a moeda de destino - * (`toCurrency`). Formato String. - */ - bidPrice: string; - - /** - * **Variação Absoluta (Bid):** Mudança absoluta no preço de compra (bid) desde o - * último fechamento ou período de referência. Formato String. - */ - bidVariation: string; - - /** - * **Moeda de Origem:** Sigla da moeda base do par (ex: `USD` em `USD-BRL`). - */ - fromCurrency: string; - - /** - * **Máxima:** Preço mais alto atingido pelo par no período recente (geralmente - * diário). Formato String. - */ - high: string; - - /** - * **Mínima:** Preço mais baixo atingido pelo par no período recente (geralmente - * diário). Formato String. - */ - low: string; - - /** - * **Nome do Par:** Nome descritivo do par de moedas (ex: - * `Dólar Americano/Real Brasileiro`). - */ - name: string; - - /** - * **Variação Percentual:** Mudança percentual no preço do par desde o último - * fechamento ou período de referência. Formato String. - */ - percentageChange: string; - - /** - * **Moeda de Destino:** Sigla da moeda de cotação do par (ex: `BRL` em `USD-BRL`). - */ - toCurrency: string; - - /** - * **Data da Atualização:** Data e hora da última atualização da cotação, formatada - * de forma legível (`YYYY-MM-DD HH:MM:SS`). - */ - updatedAtDate: string; - - /** - * **Timestamp da Atualização:** Data e hora da última atualização da cotação, - * representada como um **timestamp UNIX** (string contendo o número de segundos - * desde 1970-01-01 UTC). - */ - updatedAtTimestamp: string; - } -} - -/** - * Resposta do endpoint que lista todas as moedas fiduciárias disponíveis. 
- */ -export interface CurrencyListAvailableResponse { - /** - * Lista de objetos, cada um contendo o nome de uma moeda fiduciária ou par - * suportado pela API. - */ - currencies?: Array; -} - -export namespace CurrencyListAvailableResponse { - export interface Currency { - /** - * Nome da moeda ou par de moedas suportado (ex: `Dólar Americano/Real Brasileiro`, - * `Euro/Real Brasileiro`). A sigla pode ser extraída deste nome ou consultada em - * documentação adicional. - */ - currency?: string; - } -} - -export interface CurrencyRetrieveParams { - /** - * **Obrigatório.** Uma lista de um ou mais pares de moedas a serem consultados, - * separados por vírgula (`,`). - * - * - **Formato:** `MOEDA_ORIGEM-MOEDA_DESTINO` (ex: `USD-BRL`). - * - **Disponibilidade:** Consulte os pares válidos usando o endpoint - * [`/api/v2/currency/available`](#/Moedas/getAvailableCurrencies). - * - **Exemplo:** `USD-BRL,EUR-BRL,BTC-BRL` - */ - currency: string; - - /** - * **Obrigatório caso não esteja adicionado como header "Authorization".** Seu - * token de autenticação pessoal da API Brapi. - * - * **Formas de Envio:** - * - * 1. **Query Parameter:** Adicione `?token=SEU_TOKEN` ao final da URL. - * 2. **HTTP Header:** Inclua o header `Authorization: Bearer SEU_TOKEN` na sua - * requisição. - * - * Ambos os métodos são aceitos, mas pelo menos um deles deve ser utilizado. - * Obtenha seu token em [brapi.dev/dashboard](https://brapi.dev/dashboard). - */ - token?: string; -} - -export interface CurrencyListAvailableParams { - /** - * **Obrigatório caso não esteja adicionado como header "Authorization".** Seu - * token de autenticação pessoal da API Brapi. - * - * **Formas de Envio:** - * - * 1. **Query Parameter:** Adicione `?token=SEU_TOKEN` ao final da URL. - * 2. **HTTP Header:** Inclua o header `Authorization: Bearer SEU_TOKEN` na sua - * requisição. - * - * Ambos os métodos são aceitos, mas pelo menos um deles deve ser utilizado. - * Obtenha seu token em [brapi.dev/dashboard](https://brapi.dev/dashboard). - */ - token?: string; - - /** - * **Opcional.** Termo para filtrar a lista pelo nome da moeda (correspondência - * parcial, case-insensitive). - */ - search?: string; -} - -export declare namespace Currency { - export { - type CurrencyRetrieveResponse as CurrencyRetrieveResponse, - type CurrencyListAvailableResponse as CurrencyListAvailableResponse, - type CurrencyRetrieveParams as CurrencyRetrieveParams, - type CurrencyListAvailableParams as CurrencyListAvailableParams, - }; -} diff --git a/src/resources/v2/index.ts b/src/resources/v2/index.ts deleted file mode 100644 index a169a99..0000000 --- a/src/resources/v2/index.ts +++ /dev/null @@ -1,31 +0,0 @@ -// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
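For the `/api/v2/currency` resource deleted above, the removed TypeScript test calls `client.v2.currency.retrieve({ currency: 'USD-BRL,EUR-USD' })`. The Python equivalent below is inferred by analogy with the crypto resource and is only a sketch; the generated `test_currency.py` is not part of this section.

```python
from brapi import Brapi


def usd_and_eur_in_brl(client: Brapi, token: str):
    """Bid/ask quotes for the USD-BRL and EUR-BRL fiat pairs."""
    # Pairs use the ORIGIN-DESTINATION format; the list of valid pairs comes
    # from /api/v2/currency/available.
    return client.v2.currency.retrieve(
        currency="USD-BRL,EUR-BRL",
        token=token,
    )
```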
- -export { - Crypto, - type CryptoRetrieveResponse, - type CryptoListAvailableResponse, - type CryptoRetrieveParams, - type CryptoListAvailableParams, -} from './crypto'; -export { - Currency, - type CurrencyRetrieveResponse, - type CurrencyListAvailableResponse, - type CurrencyRetrieveParams, - type CurrencyListAvailableParams, -} from './currency'; -export { - Inflation, - type InflationRetrieveResponse, - type InflationListAvailableResponse, - type InflationRetrieveParams, - type InflationListAvailableParams, -} from './inflation'; -export { - PrimeRate, - type PrimeRateRetrieveResponse, - type PrimeRateListAvailableResponse, - type PrimeRateRetrieveParams, - type PrimeRateListAvailableParams, -} from './prime-rate'; -export { V2 } from './v2'; diff --git a/src/resources/v2/inflation.ts b/src/resources/v2/inflation.ts deleted file mode 100644 index 42cde96..0000000 --- a/src/resources/v2/inflation.ts +++ /dev/null @@ -1,226 +0,0 @@ -// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -import { APIResource } from '../../core/resource'; -import { APIPromise } from '../../core/api-promise'; -import { RequestOptions } from '../../internal/request-options'; - -export class Inflation extends APIResource { - /** - * Obtenha dados históricos sobre índices de inflação para um país específico. - * - * ### Funcionalidades: - * - * - **Seleção de País:** Especifique o país desejado com o parâmetro `country` - * (padrão: `brazil`). - * - **Filtragem por Período:** Defina um intervalo de datas com `start` e `end` - * (formato DD/MM/YYYY). - * - **Inclusão de Histórico:** O parâmetro `historical` (booleano) parece - * controlar a inclusão de dados históricos (verificar comportamento exato, pode - * ser redundante com `start`/`end`). - * - **Ordenação:** Ordene os resultados por data (`date`) ou valor (`value`) - * usando `sortBy` e `sortOrder`. - * - * ### Autenticação: - * - * Requer token de autenticação via `token` (query) ou `Authorization` (header). - * - * ### Exemplo de Requisição: - * - * **Buscar dados de inflação do Brasil para o ano de 2022, ordenados por valor - * ascendente:** - * - * ```bash - * curl -X GET "https://brapi.dev/api/v2/inflation?country=brazil&start=01/01/2022&end=31/12/2022&sortBy=value&sortOrder=asc&token=SEU_TOKEN" - * ``` - * - * **Buscar os dados mais recentes de inflação (sem período definido, ordenação - * padrão):** - * - * ```bash - * curl -X GET "https://brapi.dev/api/v2/inflation?country=brazil&token=SEU_TOKEN" - * ``` - * - * ### Resposta: - * - * A resposta contém um array `inflation`, onde cada objeto representa um ponto de - * dado de inflação com sua `date` (DD/MM/YYYY), `value` (o índice de inflação como - * string) e `epochDate` (timestamp UNIX). - */ - retrieve( - query: InflationRetrieveParams | null | undefined = {}, - options?: RequestOptions, - ): APIPromise { - return this._client.get('/api/v2/inflation', { query, ...options }); - } - - /** - * Obtenha a lista completa de todos os países para os quais a API Brapi possui - * dados de inflação disponíveis para consulta no endpoint `/api/v2/inflation`. - * - * ### Funcionalidade: - * - * - Retorna um array `countries` com os nomes dos países (em minúsculas). - * - Pode ser filtrado usando o parâmetro `search`. - * - * ### Autenticação: - * - * Requer token de autenticação via `token` (query) ou `Authorization` (header). 
- * - * ### Exemplo de Requisição: - * - * **Listar todos os países com dados de inflação:** - * - * ```bash - * curl -X GET "https://brapi.dev/api/v2/inflation/available?token=SEU_TOKEN" - * ``` - * - * **Buscar países cujo nome contenha 'arg':** - * - * ```bash - * curl -X GET "https://brapi.dev/api/v2/inflation/available?search=arg&token=SEU_TOKEN" - * ``` - * - * ### Resposta: - * - * A resposta é um objeto JSON com a chave `countries`, contendo um array de - * strings com os nomes dos países (ex: `["brazil", "argentina", "usa"]`). - */ - listAvailable( - query: InflationListAvailableParams | null | undefined = {}, - options?: RequestOptions, - ): APIPromise { - return this._client.get('/api/v2/inflation/available', { query, ...options }); - } -} - -/** - * Resposta principal do endpoint `/api/v2/inflation`. - */ -export interface InflationRetrieveResponse { - /** - * Array contendo os registros históricos de inflação para o país e período - * solicitados. - */ - inflation?: Array; -} - -export namespace InflationRetrieveResponse { - /** - * Representa um ponto de dado histórico de inflação para um país. - */ - export interface Inflation { - /** - * Data da medição da inflação, no formato `DD/MM/YYYY`. - */ - date?: string; - - /** - * Timestamp UNIX (número de segundos desde 1970-01-01 UTC) correspondente à - * `date`. - */ - epochDate?: number; - - /** - * Valor do índice de inflação para a data especificada (formato string, pode - * conter `%` ou ser apenas numérico). - */ - value?: string; - } -} - -/** - * Resposta do endpoint que lista os países com dados de inflação disponíveis. - */ -export interface InflationListAvailableResponse { - /** - * Lista de nomes de países (em minúsculas) para os quais há dados de inflação - * disponíveis (ex: `brazil`, `usa`, `argentina`). - */ - countries?: Array; -} - -export interface InflationRetrieveParams { - /** - * **Obrigatório caso não esteja adicionado como header "Authorization".** Seu - * token de autenticação pessoal da API Brapi. - * - * **Formas de Envio:** - * - * 1. **Query Parameter:** Adicione `?token=SEU_TOKEN` ao final da URL. - * 2. **HTTP Header:** Inclua o header `Authorization: Bearer SEU_TOKEN` na sua - * requisição. - * - * Ambos os métodos são aceitos, mas pelo menos um deles deve ser utilizado. - * Obtenha seu token em [brapi.dev/dashboard](https://brapi.dev/dashboard). - */ - token?: string; - - /** - * **Opcional.** Nome do país para o qual buscar os dados de inflação. Use nomes em - * minúsculas. O padrão é `brazil`. Consulte `/api/v2/inflation/available` para a - * lista de países suportados. - */ - country?: string; - - /** - * **Opcional.** Data final do período desejado para os dados históricos, no - * formato `DD/MM/YYYY`. Requerido se `start` for especificado. - */ - end?: string; - - /** - * **Opcional.** Booleano (`true` ou `false`). Define se dados históricos devem ser - * incluídos. O comportamento exato em conjunto com `start`/`end` deve ser - * verificado. Padrão: `false`. - */ - historical?: boolean; - - /** - * **Opcional.** Campo pelo qual os resultados da inflação serão ordenados. - */ - sortBy?: 'date' | 'value'; - - /** - * **Opcional.** Direção da ordenação: `asc` (ascendente) ou `desc` (descendente). - * Padrão: `desc`. Requer que `sortBy` seja especificado. - */ - sortOrder?: 'asc' | 'desc'; - - /** - * **Opcional.** Data de início do período desejado para os dados históricos, no - * formato `DD/MM/YYYY`. Requerido se `end` for especificado. 
- */ - start?: string; -} - -export interface InflationListAvailableParams { - /** - * **Obrigatório caso não esteja adicionado como header "Authorization".** Seu - * token de autenticação pessoal da API Brapi. - * - * **Formas de Envio:** - * - * 1. **Query Parameter:** Adicione `?token=SEU_TOKEN` ao final da URL. - * 2. **HTTP Header:** Inclua o header `Authorization: Bearer SEU_TOKEN` na sua - * requisição. - * - * Ambos os métodos são aceitos, mas pelo menos um deles deve ser utilizado. - * Obtenha seu token em [brapi.dev/dashboard](https://brapi.dev/dashboard). - */ - token?: string; - - /** - * **Opcional.** Termo para filtrar a lista pelo nome do país (correspondência - * parcial, case-insensitive). Se omitido, retorna todos os países. - */ - search?: string; -} - -export declare namespace Inflation { - export { - type InflationRetrieveResponse as InflationRetrieveResponse, - type InflationListAvailableResponse as InflationListAvailableResponse, - type InflationRetrieveParams as InflationRetrieveParams, - type InflationListAvailableParams as InflationListAvailableParams, - }; -} diff --git a/src/resources/v2/prime-rate.ts b/src/resources/v2/prime-rate.ts deleted file mode 100644 index 7d465e5..0000000 --- a/src/resources/v2/prime-rate.ts +++ /dev/null @@ -1,206 +0,0 @@ -// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -import { APIResource } from '../../core/resource'; -import { APIPromise } from '../../core/api-promise'; -import { RequestOptions } from '../../internal/request-options'; - -export class PrimeRate extends APIResource { - /** - * Obtenha informações atualizadas sobre a taxa básica de juros (SELIC) de um país - * por um período determinado. - * - * ### Funcionalidades: - * - * - **Seleção por País:** Especifique o país desejado usando o parâmetro `country` - * (padrão: brazil). - * - **Período Customizado:** Defina datas de início e fim com `start` e `end` para - * consultar um intervalo específico. - * - **Ordenação:** Ordene os resultados por data ou valor com os parâmetros - * `sortBy` e `sortOrder`. - * - **Dados Históricos:** Solicite o histórico completo ou apenas o valor mais - * recente com o parâmetro `historical`. - * - * ### Autenticação: - * - * Requer token de autenticação via `token` (query) ou `Authorization` (header). - * - * ### Exemplo de Requisição: - * - * **Taxa de juros do Brasil entre dezembro/2021 e janeiro/2022:** - * - * ```bash - * curl -X GET "https://brapi.dev/api/v2/prime-rate?country=brazil&start=01/12/2021&end=01/01/2022&sortBy=date&sortOrder=desc&token=SEU_TOKEN" - * ``` - */ - retrieve( - query: PrimeRateRetrieveParams | null | undefined = {}, - options?: RequestOptions, - ): APIPromise { - return this._client.get('/api/v2/prime-rate', { query, ...options }); - } - - /** - * Liste todos os países disponíveis com dados de taxa básica de juros (SELIC) na - * API brapi. Este endpoint facilita a descoberta de quais países possuem dados - * disponíveis para consulta através do endpoint principal `/api/v2/prime-rate`. - * - * ### Funcionalidades: - * - * - **Busca Filtrada:** Utilize o parâmetro `search` para filtrar países por nome - * ou parte do nome. - * - **Ideal para Autocomplete:** Perfeito para implementar campos de busca com - * autocompletar em interfaces de usuário. - * - * ### Autenticação: - * - * Requer token de autenticação via `token` (query) ou `Authorization` (header). 
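The inflation endpoint documented above, and the prime-rate (SELIC) endpoint whose description begins here, accept the same call shape. A hedged Python sketch: the snake_case `sort_by`/`sort_order` names follow the pattern confirmed in `tests/api_resources/test_quote.py`, and `client.v2.prime_rate` is the assumed Python spelling of the TypeScript `primeRate` accessor shown later in `v2.ts`.

```python
from brapi import Brapi


def brazil_inflation_2022(client: Brapi, token: str):
    """Inflation series for Brazil over calendar year 2022, oldest first."""
    return client.v2.inflation.retrieve(
        country="brazil",      # lower-case names; see /api/v2/inflation/available
        start="01/01/2022",    # DD/MM/YYYY per the endpoint docs
        end="31/12/2022",
        historical=True,
        sort_by="date",
        sort_order="asc",
        token=token,
    )


def latest_selic(client: Brapi, token: str):
    """Most recent SELIC (prime rate) value for Brazil."""
    # `prime_rate` is an assumed accessor name; historical=False (the default)
    # returns only the latest value.
    return client.v2.prime_rate.retrieve(country="brazil", token=token)
```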
- * - * ### Exemplo de Requisição: - * - * **Listar países que contenham "BR" no nome:** - * - * ```bash - * curl -X GET "https://brapi.dev/api/v2/prime-rate/available?search=BR&token=SEU_TOKEN" - * ``` - */ - listAvailable( - query: PrimeRateListAvailableParams | null | undefined = {}, - options?: RequestOptions, - ): APIPromise { - return this._client.get('/api/v2/prime-rate/available', { query, ...options }); - } -} - -/** - * Resposta principal do endpoint `/api/v2/prime-rate`. - */ -export interface PrimeRateRetrieveResponse { - /** - * Array contendo os registros históricos de taxa básica de juros (SELIC) para o - * país e período solicitados. - */ - 'prime-rate'?: Array; -} - -export namespace PrimeRateRetrieveResponse { - /** - * Representa um registro individual de taxa básica de juros (SELIC) para uma data - * específica. - */ - export interface PrimeRate { - /** - * Data do registro no formato DD/MM/YYYY. - */ - date?: string; - - /** - * Timestamp em milissegundos (formato epoch) correspondente à data do registro. - */ - epochDate?: number; - - /** - * Valor da taxa básica de juros (SELIC) para a data correspondente. - */ - value?: string; - } -} - -/** - * Resposta do endpoint `/api/v2/prime-rate/available` que lista os países - * disponíveis para consulta de taxa básica de juros (SELIC). - */ -export interface PrimeRateListAvailableResponse { - /** - * Lista de países com dados de taxa básica de juros (SELIC) disponíveis para - * consulta. - */ - countries?: Array; -} - -export interface PrimeRateRetrieveParams { - /** - * **Obrigatório caso não esteja adicionado como header "Authorization".** Seu - * token de autenticação pessoal da API Brapi. - * - * **Formas de Envio:** - * - * 1. **Query Parameter:** Adicione `?token=SEU_TOKEN` ao final da URL. - * 2. **HTTP Header:** Inclua o header `Authorization: Bearer SEU_TOKEN` na sua - * requisição. - * - * Ambos os métodos são aceitos, mas pelo menos um deles deve ser utilizado. - * Obtenha seu token em [brapi.dev/dashboard](https://brapi.dev/dashboard). - */ - token?: string; - - /** - * **Opcional.** O país do qual você deseja obter informações sobre a taxa básica - * de juros. Por padrão, o país é definido como brazil. Você pode consultar a lista - * de países disponíveis através do endpoint `/api/v2/prime-rate/available`. - */ - country?: string; - - /** - * **Opcional.** Data final do período para busca no formato DD/MM/YYYY. Por padrão - * é a data atual. Útil quando `historical=true` para restringir o período da série - * histórica. - */ - end?: string; - - /** - * **Opcional.** Define se os dados históricos serão retornados. Se definido como - * `true`, retorna a série histórica completa. Se `false` (padrão) ou omitido, - * retorna apenas o valor mais recente. - */ - historical?: boolean; - - /** - * **Opcional.** Campo pelo qual os resultados serão ordenados. Por padrão, ordena - * por `date` (data). - */ - sortBy?: 'date' | 'value'; - - /** - * **Opcional.** Define se a ordenação será crescente (`asc`) ou decrescente - * (`desc`). Por padrão, é `desc` (decrescente). - */ - sortOrder?: 'asc' | 'desc'; - - /** - * **Opcional.** Data inicial do período para busca no formato DD/MM/YYYY. Útil - * quando `historical=true` para restringir o período da série histórica. - */ - start?: string; -} - -export interface PrimeRateListAvailableParams { - /** - * **Obrigatório caso não esteja adicionado como header "Authorization".** Seu - * token de autenticação pessoal da API Brapi. - * - * **Formas de Envio:** - * - * 1. 
**Query Parameter:** Adicione `?token=SEU_TOKEN` ao final da URL. - * 2. **HTTP Header:** Inclua o header `Authorization: Bearer SEU_TOKEN` na sua - * requisição. - * - * Ambos os métodos são aceitos, mas pelo menos um deles deve ser utilizado. - * Obtenha seu token em [brapi.dev/dashboard](https://brapi.dev/dashboard). - */ - token?: string; - - /** - * **Opcional.** Termo para filtrar a lista de países por nome. Retorna países - * cujos nomes contenham o termo especificado (case insensitive). - */ - search?: string; -} - -export declare namespace PrimeRate { - export { - type PrimeRateRetrieveResponse as PrimeRateRetrieveResponse, - type PrimeRateListAvailableResponse as PrimeRateListAvailableResponse, - type PrimeRateRetrieveParams as PrimeRateRetrieveParams, - type PrimeRateListAvailableParams as PrimeRateListAvailableParams, - }; -} diff --git a/src/resources/v2/v2.ts b/src/resources/v2/v2.ts deleted file mode 100644 index 84b7a2c..0000000 --- a/src/resources/v2/v2.ts +++ /dev/null @@ -1,81 +0,0 @@ -// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -import { APIResource } from '../../core/resource'; -import * as CryptoAPI from './crypto'; -import { - Crypto, - CryptoListAvailableParams, - CryptoListAvailableResponse, - CryptoRetrieveParams, - CryptoRetrieveResponse, -} from './crypto'; -import * as CurrencyAPI from './currency'; -import { - Currency, - CurrencyListAvailableParams, - CurrencyListAvailableResponse, - CurrencyRetrieveParams, - CurrencyRetrieveResponse, -} from './currency'; -import * as InflationAPI from './inflation'; -import { - Inflation, - InflationListAvailableParams, - InflationListAvailableResponse, - InflationRetrieveParams, - InflationRetrieveResponse, -} from './inflation'; -import * as PrimeRateAPI from './prime-rate'; -import { - PrimeRate, - PrimeRateListAvailableParams, - PrimeRateListAvailableResponse, - PrimeRateRetrieveParams, - PrimeRateRetrieveResponse, -} from './prime-rate'; - -export class V2 extends APIResource { - crypto: CryptoAPI.Crypto = new CryptoAPI.Crypto(this._client); - currency: CurrencyAPI.Currency = new CurrencyAPI.Currency(this._client); - inflation: InflationAPI.Inflation = new InflationAPI.Inflation(this._client); - primeRate: PrimeRateAPI.PrimeRate = new PrimeRateAPI.PrimeRate(this._client); -} - -V2.Crypto = Crypto; -V2.Currency = Currency; -V2.Inflation = Inflation; -V2.PrimeRate = PrimeRate; - -export declare namespace V2 { - export { - Crypto as Crypto, - type CryptoRetrieveResponse as CryptoRetrieveResponse, - type CryptoListAvailableResponse as CryptoListAvailableResponse, - type CryptoRetrieveParams as CryptoRetrieveParams, - type CryptoListAvailableParams as CryptoListAvailableParams, - }; - - export { - Currency as Currency, - type CurrencyRetrieveResponse as CurrencyRetrieveResponse, - type CurrencyListAvailableResponse as CurrencyListAvailableResponse, - type CurrencyRetrieveParams as CurrencyRetrieveParams, - type CurrencyListAvailableParams as CurrencyListAvailableParams, - }; - - export { - Inflation as Inflation, - type InflationRetrieveResponse as InflationRetrieveResponse, - type InflationListAvailableResponse as InflationListAvailableResponse, - type InflationRetrieveParams as InflationRetrieveParams, - type InflationListAvailableParams as InflationListAvailableParams, - }; - - export { - PrimeRate as PrimeRate, - type PrimeRateRetrieveResponse as PrimeRateRetrieveResponse, - type PrimeRateListAvailableResponse as PrimeRateListAvailableResponse, - type PrimeRateRetrieveParams 
as PrimeRateRetrieveParams, - type PrimeRateListAvailableParams as PrimeRateListAvailableParams, - }; -} diff --git a/src/uploads.ts b/src/uploads.ts deleted file mode 100644 index b2ef647..0000000 --- a/src/uploads.ts +++ /dev/null @@ -1,2 +0,0 @@ -/** @deprecated Import from ./core/uploads instead */ -export * from './core/uploads'; diff --git a/src/version.ts b/src/version.ts deleted file mode 100644 index bea2896..0000000 --- a/src/version.ts +++ /dev/null @@ -1 +0,0 @@ -export const VERSION = '1.0.0'; // x-release-please-version diff --git a/tests/__init__.py b/tests/__init__.py new file mode 100644 index 0000000..fd8019a --- /dev/null +++ b/tests/__init__.py @@ -0,0 +1 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. diff --git a/tests/api-resources/available.test.ts b/tests/api-resources/available.test.ts deleted file mode 100644 index 6cd723e..0000000 --- a/tests/api-resources/available.test.ts +++ /dev/null @@ -1,30 +0,0 @@ -// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -import Brapi from 'brapi'; - -const client = new Brapi({ - apiKey: 'My API Key', - baseURL: process.env['TEST_API_BASE_URL'] ?? 'http://127.0.0.1:4010', -}); - -describe('resource available', () => { - // Prism tests are disabled - test.skip('list', async () => { - const responsePromise = client.available.list(); - const rawResponse = await responsePromise.asResponse(); - expect(rawResponse).toBeInstanceOf(Response); - const response = await responsePromise; - expect(response).not.toBeInstanceOf(Response); - const dataAndResponse = await responsePromise.withResponse(); - expect(dataAndResponse.data).toBe(response); - expect(dataAndResponse.response).toBe(rawResponse); - }); - - // Prism tests are disabled - test.skip('list: request options and params are passed correctly', async () => { - // ensure the request options are being passed correctly by passing an invalid HTTP method in order to cause an error - await expect( - client.available.list({ token: 'token', search: 'search' }, { path: '/_stainless_unknown_path' }), - ).rejects.toThrow(Brapi.NotFoundError); - }); -}); diff --git a/tests/api-resources/quote.test.ts b/tests/api-resources/quote.test.ts deleted file mode 100644 index b6cbe06..0000000 --- a/tests/api-resources/quote.test.ts +++ /dev/null @@ -1,73 +0,0 @@ -// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -import Brapi from 'brapi'; - -const client = new Brapi({ - apiKey: 'My API Key', - baseURL: process.env['TEST_API_BASE_URL'] ?? 
'http://127.0.0.1:4010', -}); - -describe('resource quote', () => { - // Prism tests are disabled - test.skip('retrieve', async () => { - const responsePromise = client.quote.retrieve('PETR4,MGLU3'); - const rawResponse = await responsePromise.asResponse(); - expect(rawResponse).toBeInstanceOf(Response); - const response = await responsePromise; - expect(response).not.toBeInstanceOf(Response); - const dataAndResponse = await responsePromise.withResponse(); - expect(dataAndResponse.data).toBe(response); - expect(dataAndResponse.response).toBe(rawResponse); - }); - - // Prism tests are disabled - test.skip('retrieve: request options and params are passed correctly', async () => { - // ensure the request options are being passed correctly by passing an invalid HTTP method in order to cause an error - await expect( - client.quote.retrieve( - 'PETR4,MGLU3', - { - token: 'token', - dividends: true, - fundamental: true, - interval: '1d', - modules: ['summaryProfile', 'balanceSheetHistory', 'financialData'], - range: '5d', - }, - { path: '/_stainless_unknown_path' }, - ), - ).rejects.toThrow(Brapi.NotFoundError); - }); - - // Prism tests are disabled - test.skip('list', async () => { - const responsePromise = client.quote.list(); - const rawResponse = await responsePromise.asResponse(); - expect(rawResponse).toBeInstanceOf(Response); - const response = await responsePromise; - expect(response).not.toBeInstanceOf(Response); - const dataAndResponse = await responsePromise.withResponse(); - expect(dataAndResponse.data).toBe(response); - expect(dataAndResponse.response).toBe(rawResponse); - }); - - // Prism tests are disabled - test.skip('list: request options and params are passed correctly', async () => { - // ensure the request options are being passed correctly by passing an invalid HTTP method in order to cause an error - await expect( - client.quote.list( - { - token: 'token', - limit: 1, - page: 1, - search: 'search', - sector: 'Retail Trade', - sortBy: 'name', - sortOrder: 'asc', - type: 'stock', - }, - { path: '/_stainless_unknown_path' }, - ), - ).rejects.toThrow(Brapi.NotFoundError); - }); -}); diff --git a/tests/api-resources/v2/crypto.test.ts b/tests/api-resources/v2/crypto.test.ts deleted file mode 100644 index 33242f8..0000000 --- a/tests/api-resources/v2/crypto.test.ts +++ /dev/null @@ -1,56 +0,0 @@ -// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -import Brapi from 'brapi'; - -const client = new Brapi({ - apiKey: 'My API Key', - baseURL: process.env['TEST_API_BASE_URL'] ?? 
'http://127.0.0.1:4010', -}); - -describe('resource crypto', () => { - // Prism tests are disabled - test.skip('retrieve: only required params', async () => { - const responsePromise = client.v2.crypto.retrieve({ coin: 'coin' }); - const rawResponse = await responsePromise.asResponse(); - expect(rawResponse).toBeInstanceOf(Response); - const response = await responsePromise; - expect(response).not.toBeInstanceOf(Response); - const dataAndResponse = await responsePromise.withResponse(); - expect(dataAndResponse.data).toBe(response); - expect(dataAndResponse.response).toBe(rawResponse); - }); - - // Prism tests are disabled - test.skip('retrieve: required and optional params', async () => { - const response = await client.v2.crypto.retrieve({ - coin: 'coin', - token: 'token', - currency: 'currency', - interval: '1m', - range: '1d', - }); - }); - - // Prism tests are disabled - test.skip('listAvailable', async () => { - const responsePromise = client.v2.crypto.listAvailable(); - const rawResponse = await responsePromise.asResponse(); - expect(rawResponse).toBeInstanceOf(Response); - const response = await responsePromise; - expect(response).not.toBeInstanceOf(Response); - const dataAndResponse = await responsePromise.withResponse(); - expect(dataAndResponse.data).toBe(response); - expect(dataAndResponse.response).toBe(rawResponse); - }); - - // Prism tests are disabled - test.skip('listAvailable: request options and params are passed correctly', async () => { - // ensure the request options are being passed correctly by passing an invalid HTTP method in order to cause an error - await expect( - client.v2.crypto.listAvailable( - { token: 'token', search: 'search' }, - { path: '/_stainless_unknown_path' }, - ), - ).rejects.toThrow(Brapi.NotFoundError); - }); -}); diff --git a/tests/api-resources/v2/currency.test.ts b/tests/api-resources/v2/currency.test.ts deleted file mode 100644 index 1b0bcda..0000000 --- a/tests/api-resources/v2/currency.test.ts +++ /dev/null @@ -1,50 +0,0 @@ -// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -import Brapi from 'brapi'; - -const client = new Brapi({ - apiKey: 'My API Key', - baseURL: process.env['TEST_API_BASE_URL'] ?? 
'http://127.0.0.1:4010', -}); - -describe('resource currency', () => { - // Prism tests are disabled - test.skip('retrieve: only required params', async () => { - const responsePromise = client.v2.currency.retrieve({ currency: 'USD-BRL,EUR-USD' }); - const rawResponse = await responsePromise.asResponse(); - expect(rawResponse).toBeInstanceOf(Response); - const response = await responsePromise; - expect(response).not.toBeInstanceOf(Response); - const dataAndResponse = await responsePromise.withResponse(); - expect(dataAndResponse.data).toBe(response); - expect(dataAndResponse.response).toBe(rawResponse); - }); - - // Prism tests are disabled - test.skip('retrieve: required and optional params', async () => { - const response = await client.v2.currency.retrieve({ currency: 'USD-BRL,EUR-USD', token: 'token' }); - }); - - // Prism tests are disabled - test.skip('listAvailable', async () => { - const responsePromise = client.v2.currency.listAvailable(); - const rawResponse = await responsePromise.asResponse(); - expect(rawResponse).toBeInstanceOf(Response); - const response = await responsePromise; - expect(response).not.toBeInstanceOf(Response); - const dataAndResponse = await responsePromise.withResponse(); - expect(dataAndResponse.data).toBe(response); - expect(dataAndResponse.response).toBe(rawResponse); - }); - - // Prism tests are disabled - test.skip('listAvailable: request options and params are passed correctly', async () => { - // ensure the request options are being passed correctly by passing an invalid HTTP method in order to cause an error - await expect( - client.v2.currency.listAvailable( - { token: 'token', search: 'search' }, - { path: '/_stainless_unknown_path' }, - ), - ).rejects.toThrow(Brapi.NotFoundError); - }); -}); diff --git a/tests/api-resources/v2/inflation.test.ts b/tests/api-resources/v2/inflation.test.ts deleted file mode 100644 index 1b4798e..0000000 --- a/tests/api-resources/v2/inflation.test.ts +++ /dev/null @@ -1,64 +0,0 @@ -// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -import Brapi from 'brapi'; - -const client = new Brapi({ - apiKey: 'My API Key', - baseURL: process.env['TEST_API_BASE_URL'] ?? 
'http://127.0.0.1:4010', -}); - -describe('resource inflation', () => { - // Prism tests are disabled - test.skip('retrieve', async () => { - const responsePromise = client.v2.inflation.retrieve(); - const rawResponse = await responsePromise.asResponse(); - expect(rawResponse).toBeInstanceOf(Response); - const response = await responsePromise; - expect(response).not.toBeInstanceOf(Response); - const dataAndResponse = await responsePromise.withResponse(); - expect(dataAndResponse.data).toBe(response); - expect(dataAndResponse.response).toBe(rawResponse); - }); - - // Prism tests are disabled - test.skip('retrieve: request options and params are passed correctly', async () => { - // ensure the request options are being passed correctly by passing an invalid HTTP method in order to cause an error - await expect( - client.v2.inflation.retrieve( - { - token: 'token', - country: 'country', - end: '2019-12-27', - historical: true, - sortBy: 'date', - sortOrder: 'asc', - start: '2019-12-27', - }, - { path: '/_stainless_unknown_path' }, - ), - ).rejects.toThrow(Brapi.NotFoundError); - }); - - // Prism tests are disabled - test.skip('listAvailable', async () => { - const responsePromise = client.v2.inflation.listAvailable(); - const rawResponse = await responsePromise.asResponse(); - expect(rawResponse).toBeInstanceOf(Response); - const response = await responsePromise; - expect(response).not.toBeInstanceOf(Response); - const dataAndResponse = await responsePromise.withResponse(); - expect(dataAndResponse.data).toBe(response); - expect(dataAndResponse.response).toBe(rawResponse); - }); - - // Prism tests are disabled - test.skip('listAvailable: request options and params are passed correctly', async () => { - // ensure the request options are being passed correctly by passing an invalid HTTP method in order to cause an error - await expect( - client.v2.inflation.listAvailable( - { token: 'token', search: 'search' }, - { path: '/_stainless_unknown_path' }, - ), - ).rejects.toThrow(Brapi.NotFoundError); - }); -}); diff --git a/tests/api-resources/v2/prime-rate.test.ts b/tests/api-resources/v2/prime-rate.test.ts deleted file mode 100644 index 2c1e658..0000000 --- a/tests/api-resources/v2/prime-rate.test.ts +++ /dev/null @@ -1,64 +0,0 @@ -// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -import Brapi from 'brapi'; - -const client = new Brapi({ - apiKey: 'My API Key', - baseURL: process.env['TEST_API_BASE_URL'] ?? 
'http://127.0.0.1:4010', -}); - -describe('resource primeRate', () => { - // Prism tests are disabled - test.skip('retrieve', async () => { - const responsePromise = client.v2.primeRate.retrieve(); - const rawResponse = await responsePromise.asResponse(); - expect(rawResponse).toBeInstanceOf(Response); - const response = await responsePromise; - expect(response).not.toBeInstanceOf(Response); - const dataAndResponse = await responsePromise.withResponse(); - expect(dataAndResponse.data).toBe(response); - expect(dataAndResponse.response).toBe(rawResponse); - }); - - // Prism tests are disabled - test.skip('retrieve: request options and params are passed correctly', async () => { - // ensure the request options are being passed correctly by passing an invalid HTTP method in order to cause an error - await expect( - client.v2.primeRate.retrieve( - { - token: 'token', - country: 'country', - end: '2019-12-27', - historical: true, - sortBy: 'date', - sortOrder: 'asc', - start: '2019-12-27', - }, - { path: '/_stainless_unknown_path' }, - ), - ).rejects.toThrow(Brapi.NotFoundError); - }); - - // Prism tests are disabled - test.skip('listAvailable', async () => { - const responsePromise = client.v2.primeRate.listAvailable(); - const rawResponse = await responsePromise.asResponse(); - expect(rawResponse).toBeInstanceOf(Response); - const response = await responsePromise; - expect(response).not.toBeInstanceOf(Response); - const dataAndResponse = await responsePromise.withResponse(); - expect(dataAndResponse.data).toBe(response); - expect(dataAndResponse.response).toBe(rawResponse); - }); - - // Prism tests are disabled - test.skip('listAvailable: request options and params are passed correctly', async () => { - // ensure the request options are being passed correctly by passing an invalid HTTP method in order to cause an error - await expect( - client.v2.primeRate.listAvailable( - { token: 'token', search: 'search' }, - { path: '/_stainless_unknown_path' }, - ), - ).rejects.toThrow(Brapi.NotFoundError); - }); -}); diff --git a/tests/api_resources/__init__.py b/tests/api_resources/__init__.py new file mode 100644 index 0000000..fd8019a --- /dev/null +++ b/tests/api_resources/__init__.py @@ -0,0 +1 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. diff --git a/tests/api_resources/test_available.py b/tests/api_resources/test_available.py new file mode 100644 index 0000000..6b366e0 --- /dev/null +++ b/tests/api_resources/test_available.py @@ -0,0 +1,98 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from __future__ import annotations + +import os +from typing import Any, cast + +import pytest + +from brapi import Brapi, AsyncBrapi +from brapi.types import AvailableListResponse +from tests.utils import assert_matches_type + +base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010") + + +class TestAvailable: + parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_list(self, client: Brapi) -> None: + available = client.available.list() + assert_matches_type(AvailableListResponse, available, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_list_with_all_params(self, client: Brapi) -> None: + available = client.available.list( + token="token", + search="search", + ) + assert_matches_type(AvailableListResponse, available, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_raw_response_list(self, client: Brapi) -> None: + response = client.available.with_raw_response.list() + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + available = response.parse() + assert_matches_type(AvailableListResponse, available, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_streaming_response_list(self, client: Brapi) -> None: + with client.available.with_streaming_response.list() as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + available = response.parse() + assert_matches_type(AvailableListResponse, available, path=["response"]) + + assert cast(Any, response.is_closed) is True + + +class TestAsyncAvailable: + parametrize = pytest.mark.parametrize( + "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"] + ) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_list(self, async_client: AsyncBrapi) -> None: + available = await async_client.available.list() + assert_matches_type(AvailableListResponse, available, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_list_with_all_params(self, async_client: AsyncBrapi) -> None: + available = await async_client.available.list( + token="token", + search="search", + ) + assert_matches_type(AvailableListResponse, available, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_raw_response_list(self, async_client: AsyncBrapi) -> None: + response = await async_client.available.with_raw_response.list() + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + available = await response.parse() + assert_matches_type(AvailableListResponse, available, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_streaming_response_list(self, async_client: AsyncBrapi) -> None: + async with async_client.available.with_streaming_response.list() as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + available = await response.parse() + assert_matches_type(AvailableListResponse, available, path=["response"]) + + assert cast(Any, response.is_closed) is 
True diff --git a/tests/api_resources/test_quote.py b/tests/api_resources/test_quote.py new file mode 100644 index 0000000..21a14a6 --- /dev/null +++ b/tests/api_resources/test_quote.py @@ -0,0 +1,222 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +import os +from typing import Any, cast + +import pytest + +from brapi import Brapi, AsyncBrapi +from brapi.types import QuoteListResponse, QuoteRetrieveResponse +from tests.utils import assert_matches_type + +base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010") + + +class TestQuote: + parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_retrieve(self, client: Brapi) -> None: + quote = client.quote.retrieve( + tickers="PETR4,MGLU3", + ) + assert_matches_type(QuoteRetrieveResponse, quote, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_retrieve_with_all_params(self, client: Brapi) -> None: + quote = client.quote.retrieve( + tickers="PETR4,MGLU3", + token="token", + dividends=True, + fundamental=True, + interval="1d", + modules=["summaryProfile", "balanceSheetHistory", "financialData"], + range="5d", + ) + assert_matches_type(QuoteRetrieveResponse, quote, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_raw_response_retrieve(self, client: Brapi) -> None: + response = client.quote.with_raw_response.retrieve( + tickers="PETR4,MGLU3", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + quote = response.parse() + assert_matches_type(QuoteRetrieveResponse, quote, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_streaming_response_retrieve(self, client: Brapi) -> None: + with client.quote.with_streaming_response.retrieve( + tickers="PETR4,MGLU3", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + quote = response.parse() + assert_matches_type(QuoteRetrieveResponse, quote, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_path_params_retrieve(self, client: Brapi) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `tickers` but received ''"): + client.quote.with_raw_response.retrieve( + tickers="", + ) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_list(self, client: Brapi) -> None: + quote = client.quote.list() + assert_matches_type(QuoteListResponse, quote, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_list_with_all_params(self, client: Brapi) -> None: + quote = client.quote.list( + token="token", + limit=1, + page=1, + search="search", + sector="Retail Trade", + sort_by="name", + sort_order="asc", + type="stock", + ) + assert_matches_type(QuoteListResponse, quote, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_raw_response_list(self, client: Brapi) -> None: + response = client.quote.with_raw_response.list() + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + 
quote = response.parse() + assert_matches_type(QuoteListResponse, quote, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_streaming_response_list(self, client: Brapi) -> None: + with client.quote.with_streaming_response.list() as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + quote = response.parse() + assert_matches_type(QuoteListResponse, quote, path=["response"]) + + assert cast(Any, response.is_closed) is True + + +class TestAsyncQuote: + parametrize = pytest.mark.parametrize( + "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"] + ) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_retrieve(self, async_client: AsyncBrapi) -> None: + quote = await async_client.quote.retrieve( + tickers="PETR4,MGLU3", + ) + assert_matches_type(QuoteRetrieveResponse, quote, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_retrieve_with_all_params(self, async_client: AsyncBrapi) -> None: + quote = await async_client.quote.retrieve( + tickers="PETR4,MGLU3", + token="token", + dividends=True, + fundamental=True, + interval="1d", + modules=["summaryProfile", "balanceSheetHistory", "financialData"], + range="5d", + ) + assert_matches_type(QuoteRetrieveResponse, quote, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_raw_response_retrieve(self, async_client: AsyncBrapi) -> None: + response = await async_client.quote.with_raw_response.retrieve( + tickers="PETR4,MGLU3", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + quote = await response.parse() + assert_matches_type(QuoteRetrieveResponse, quote, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_streaming_response_retrieve(self, async_client: AsyncBrapi) -> None: + async with async_client.quote.with_streaming_response.retrieve( + tickers="PETR4,MGLU3", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + quote = await response.parse() + assert_matches_type(QuoteRetrieveResponse, quote, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_path_params_retrieve(self, async_client: AsyncBrapi) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `tickers` but received ''"): + await async_client.quote.with_raw_response.retrieve( + tickers="", + ) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_list(self, async_client: AsyncBrapi) -> None: + quote = await async_client.quote.list() + assert_matches_type(QuoteListResponse, quote, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_list_with_all_params(self, async_client: AsyncBrapi) -> None: + quote = await async_client.quote.list( + token="token", + limit=1, + page=1, + search="search", + sector="Retail Trade", + sort_by="name", + sort_order="asc", + type="stock", + ) + assert_matches_type(QuoteListResponse, quote, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def 
test_raw_response_list(self, async_client: AsyncBrapi) -> None: + response = await async_client.quote.with_raw_response.list() + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + quote = await response.parse() + assert_matches_type(QuoteListResponse, quote, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_streaming_response_list(self, async_client: AsyncBrapi) -> None: + async with async_client.quote.with_streaming_response.list() as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + quote = await response.parse() + assert_matches_type(QuoteListResponse, quote, path=["response"]) + + assert cast(Any, response.is_closed) is True diff --git a/tests/api_resources/v2/__init__.py b/tests/api_resources/v2/__init__.py new file mode 100644 index 0000000..fd8019a --- /dev/null +++ b/tests/api_resources/v2/__init__.py @@ -0,0 +1 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. diff --git a/tests/api_resources/v2/test_crypto.py b/tests/api_resources/v2/test_crypto.py new file mode 100644 index 0000000..957328e --- /dev/null +++ b/tests/api_resources/v2/test_crypto.py @@ -0,0 +1,193 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +import os +from typing import Any, cast + +import pytest + +from brapi import Brapi, AsyncBrapi +from tests.utils import assert_matches_type +from brapi.types.v2 import ( + CryptoRetrieveResponse, + CryptoListAvailableResponse, +) + +base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010") + + +class TestCrypto: + parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_retrieve(self, client: Brapi) -> None: + crypto = client.v2.crypto.retrieve( + coin="coin", + ) + assert_matches_type(CryptoRetrieveResponse, crypto, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_retrieve_with_all_params(self, client: Brapi) -> None: + crypto = client.v2.crypto.retrieve( + coin="coin", + token="token", + currency="currency", + interval="1m", + range="1d", + ) + assert_matches_type(CryptoRetrieveResponse, crypto, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_raw_response_retrieve(self, client: Brapi) -> None: + response = client.v2.crypto.with_raw_response.retrieve( + coin="coin", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + crypto = response.parse() + assert_matches_type(CryptoRetrieveResponse, crypto, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_streaming_response_retrieve(self, client: Brapi) -> None: + with client.v2.crypto.with_streaming_response.retrieve( + coin="coin", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + crypto = response.parse() + assert_matches_type(CryptoRetrieveResponse, crypto, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_list_available(self, client: Brapi) -> None: + crypto = 
client.v2.crypto.list_available() + assert_matches_type(CryptoListAvailableResponse, crypto, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_list_available_with_all_params(self, client: Brapi) -> None: + crypto = client.v2.crypto.list_available( + token="token", + search="search", + ) + assert_matches_type(CryptoListAvailableResponse, crypto, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_raw_response_list_available(self, client: Brapi) -> None: + response = client.v2.crypto.with_raw_response.list_available() + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + crypto = response.parse() + assert_matches_type(CryptoListAvailableResponse, crypto, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_streaming_response_list_available(self, client: Brapi) -> None: + with client.v2.crypto.with_streaming_response.list_available() as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + crypto = response.parse() + assert_matches_type(CryptoListAvailableResponse, crypto, path=["response"]) + + assert cast(Any, response.is_closed) is True + + +class TestAsyncCrypto: + parametrize = pytest.mark.parametrize( + "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"] + ) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_retrieve(self, async_client: AsyncBrapi) -> None: + crypto = await async_client.v2.crypto.retrieve( + coin="coin", + ) + assert_matches_type(CryptoRetrieveResponse, crypto, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_retrieve_with_all_params(self, async_client: AsyncBrapi) -> None: + crypto = await async_client.v2.crypto.retrieve( + coin="coin", + token="token", + currency="currency", + interval="1m", + range="1d", + ) + assert_matches_type(CryptoRetrieveResponse, crypto, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_raw_response_retrieve(self, async_client: AsyncBrapi) -> None: + response = await async_client.v2.crypto.with_raw_response.retrieve( + coin="coin", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + crypto = await response.parse() + assert_matches_type(CryptoRetrieveResponse, crypto, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_streaming_response_retrieve(self, async_client: AsyncBrapi) -> None: + async with async_client.v2.crypto.with_streaming_response.retrieve( + coin="coin", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + crypto = await response.parse() + assert_matches_type(CryptoRetrieveResponse, crypto, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_list_available(self, async_client: AsyncBrapi) -> None: + crypto = await async_client.v2.crypto.list_available() + assert_matches_type(CryptoListAvailableResponse, crypto, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def 
test_method_list_available_with_all_params(self, async_client: AsyncBrapi) -> None: + crypto = await async_client.v2.crypto.list_available( + token="token", + search="search", + ) + assert_matches_type(CryptoListAvailableResponse, crypto, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_raw_response_list_available(self, async_client: AsyncBrapi) -> None: + response = await async_client.v2.crypto.with_raw_response.list_available() + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + crypto = await response.parse() + assert_matches_type(CryptoListAvailableResponse, crypto, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_streaming_response_list_available(self, async_client: AsyncBrapi) -> None: + async with async_client.v2.crypto.with_streaming_response.list_available() as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + crypto = await response.parse() + assert_matches_type(CryptoListAvailableResponse, crypto, path=["response"]) + + assert cast(Any, response.is_closed) is True diff --git a/tests/api_resources/v2/test_currency.py b/tests/api_resources/v2/test_currency.py new file mode 100644 index 0000000..0326fc1 --- /dev/null +++ b/tests/api_resources/v2/test_currency.py @@ -0,0 +1,187 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +import os +from typing import Any, cast + +import pytest + +from brapi import Brapi, AsyncBrapi +from tests.utils import assert_matches_type +from brapi.types.v2 import ( + CurrencyRetrieveResponse, + CurrencyListAvailableResponse, +) + +base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010") + + +class TestCurrency: + parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_retrieve(self, client: Brapi) -> None: + currency = client.v2.currency.retrieve( + currency="USD-BRL,EUR-USD", + ) + assert_matches_type(CurrencyRetrieveResponse, currency, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_retrieve_with_all_params(self, client: Brapi) -> None: + currency = client.v2.currency.retrieve( + currency="USD-BRL,EUR-USD", + token="token", + ) + assert_matches_type(CurrencyRetrieveResponse, currency, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_raw_response_retrieve(self, client: Brapi) -> None: + response = client.v2.currency.with_raw_response.retrieve( + currency="USD-BRL,EUR-USD", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + currency = response.parse() + assert_matches_type(CurrencyRetrieveResponse, currency, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_streaming_response_retrieve(self, client: Brapi) -> None: + with client.v2.currency.with_streaming_response.retrieve( + currency="USD-BRL,EUR-USD", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + currency = response.parse() + assert_matches_type(CurrencyRetrieveResponse, currency, path=["response"]) + + assert 
cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_list_available(self, client: Brapi) -> None: + currency = client.v2.currency.list_available() + assert_matches_type(CurrencyListAvailableResponse, currency, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_list_available_with_all_params(self, client: Brapi) -> None: + currency = client.v2.currency.list_available( + token="token", + search="search", + ) + assert_matches_type(CurrencyListAvailableResponse, currency, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_raw_response_list_available(self, client: Brapi) -> None: + response = client.v2.currency.with_raw_response.list_available() + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + currency = response.parse() + assert_matches_type(CurrencyListAvailableResponse, currency, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_streaming_response_list_available(self, client: Brapi) -> None: + with client.v2.currency.with_streaming_response.list_available() as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + currency = response.parse() + assert_matches_type(CurrencyListAvailableResponse, currency, path=["response"]) + + assert cast(Any, response.is_closed) is True + + +class TestAsyncCurrency: + parametrize = pytest.mark.parametrize( + "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"] + ) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_retrieve(self, async_client: AsyncBrapi) -> None: + currency = await async_client.v2.currency.retrieve( + currency="USD-BRL,EUR-USD", + ) + assert_matches_type(CurrencyRetrieveResponse, currency, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_retrieve_with_all_params(self, async_client: AsyncBrapi) -> None: + currency = await async_client.v2.currency.retrieve( + currency="USD-BRL,EUR-USD", + token="token", + ) + assert_matches_type(CurrencyRetrieveResponse, currency, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_raw_response_retrieve(self, async_client: AsyncBrapi) -> None: + response = await async_client.v2.currency.with_raw_response.retrieve( + currency="USD-BRL,EUR-USD", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + currency = await response.parse() + assert_matches_type(CurrencyRetrieveResponse, currency, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_streaming_response_retrieve(self, async_client: AsyncBrapi) -> None: + async with async_client.v2.currency.with_streaming_response.retrieve( + currency="USD-BRL,EUR-USD", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + currency = await response.parse() + assert_matches_type(CurrencyRetrieveResponse, currency, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_list_available(self, 
async_client: AsyncBrapi) -> None: + currency = await async_client.v2.currency.list_available() + assert_matches_type(CurrencyListAvailableResponse, currency, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_list_available_with_all_params(self, async_client: AsyncBrapi) -> None: + currency = await async_client.v2.currency.list_available( + token="token", + search="search", + ) + assert_matches_type(CurrencyListAvailableResponse, currency, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_raw_response_list_available(self, async_client: AsyncBrapi) -> None: + response = await async_client.v2.currency.with_raw_response.list_available() + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + currency = await response.parse() + assert_matches_type(CurrencyListAvailableResponse, currency, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_streaming_response_list_available(self, async_client: AsyncBrapi) -> None: + async with async_client.v2.currency.with_streaming_response.list_available() as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + currency = await response.parse() + assert_matches_type(CurrencyListAvailableResponse, currency, path=["response"]) + + assert cast(Any, response.is_closed) is True diff --git a/tests/api_resources/v2/test_inflation.py b/tests/api_resources/v2/test_inflation.py new file mode 100644 index 0000000..4071b4d --- /dev/null +++ b/tests/api_resources/v2/test_inflation.py @@ -0,0 +1,186 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from __future__ import annotations + +import os +from typing import Any, cast + +import pytest + +from brapi import Brapi, AsyncBrapi +from tests.utils import assert_matches_type +from brapi._utils import parse_date +from brapi.types.v2 import ( + InflationRetrieveResponse, + InflationListAvailableResponse, +) + +base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010") + + +class TestInflation: + parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_retrieve(self, client: Brapi) -> None: + inflation = client.v2.inflation.retrieve() + assert_matches_type(InflationRetrieveResponse, inflation, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_retrieve_with_all_params(self, client: Brapi) -> None: + inflation = client.v2.inflation.retrieve( + token="token", + country="country", + end=parse_date("2019-12-27"), + historical=True, + sort_by="date", + sort_order="asc", + start=parse_date("2019-12-27"), + ) + assert_matches_type(InflationRetrieveResponse, inflation, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_raw_response_retrieve(self, client: Brapi) -> None: + response = client.v2.inflation.with_raw_response.retrieve() + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + inflation = response.parse() + assert_matches_type(InflationRetrieveResponse, inflation, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_streaming_response_retrieve(self, client: Brapi) -> None: + with client.v2.inflation.with_streaming_response.retrieve() as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + inflation = response.parse() + assert_matches_type(InflationRetrieveResponse, inflation, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_list_available(self, client: Brapi) -> None: + inflation = client.v2.inflation.list_available() + assert_matches_type(InflationListAvailableResponse, inflation, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_list_available_with_all_params(self, client: Brapi) -> None: + inflation = client.v2.inflation.list_available( + token="token", + search="search", + ) + assert_matches_type(InflationListAvailableResponse, inflation, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_raw_response_list_available(self, client: Brapi) -> None: + response = client.v2.inflation.with_raw_response.list_available() + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + inflation = response.parse() + assert_matches_type(InflationListAvailableResponse, inflation, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_streaming_response_list_available(self, client: Brapi) -> None: + with client.v2.inflation.with_streaming_response.list_available() as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + inflation = response.parse() + 
assert_matches_type(InflationListAvailableResponse, inflation, path=["response"]) + + assert cast(Any, response.is_closed) is True + + +class TestAsyncInflation: + parametrize = pytest.mark.parametrize( + "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"] + ) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_retrieve(self, async_client: AsyncBrapi) -> None: + inflation = await async_client.v2.inflation.retrieve() + assert_matches_type(InflationRetrieveResponse, inflation, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_retrieve_with_all_params(self, async_client: AsyncBrapi) -> None: + inflation = await async_client.v2.inflation.retrieve( + token="token", + country="country", + end=parse_date("2019-12-27"), + historical=True, + sort_by="date", + sort_order="asc", + start=parse_date("2019-12-27"), + ) + assert_matches_type(InflationRetrieveResponse, inflation, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_raw_response_retrieve(self, async_client: AsyncBrapi) -> None: + response = await async_client.v2.inflation.with_raw_response.retrieve() + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + inflation = await response.parse() + assert_matches_type(InflationRetrieveResponse, inflation, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_streaming_response_retrieve(self, async_client: AsyncBrapi) -> None: + async with async_client.v2.inflation.with_streaming_response.retrieve() as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + inflation = await response.parse() + assert_matches_type(InflationRetrieveResponse, inflation, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_list_available(self, async_client: AsyncBrapi) -> None: + inflation = await async_client.v2.inflation.list_available() + assert_matches_type(InflationListAvailableResponse, inflation, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_list_available_with_all_params(self, async_client: AsyncBrapi) -> None: + inflation = await async_client.v2.inflation.list_available( + token="token", + search="search", + ) + assert_matches_type(InflationListAvailableResponse, inflation, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_raw_response_list_available(self, async_client: AsyncBrapi) -> None: + response = await async_client.v2.inflation.with_raw_response.list_available() + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + inflation = await response.parse() + assert_matches_type(InflationListAvailableResponse, inflation, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_streaming_response_list_available(self, async_client: AsyncBrapi) -> None: + async with async_client.v2.inflation.with_streaming_response.list_available() as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + inflation = await 
response.parse() + assert_matches_type(InflationListAvailableResponse, inflation, path=["response"]) + + assert cast(Any, response.is_closed) is True diff --git a/tests/api_resources/v2/test_prime_rate.py b/tests/api_resources/v2/test_prime_rate.py new file mode 100644 index 0000000..ecb5738 --- /dev/null +++ b/tests/api_resources/v2/test_prime_rate.py @@ -0,0 +1,186 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +import os +from typing import Any, cast + +import pytest + +from brapi import Brapi, AsyncBrapi +from tests.utils import assert_matches_type +from brapi._utils import parse_date +from brapi.types.v2 import ( + PrimeRateRetrieveResponse, + PrimeRateListAvailableResponse, +) + +base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010") + + +class TestPrimeRate: + parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_retrieve(self, client: Brapi) -> None: + prime_rate = client.v2.prime_rate.retrieve() + assert_matches_type(PrimeRateRetrieveResponse, prime_rate, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_retrieve_with_all_params(self, client: Brapi) -> None: + prime_rate = client.v2.prime_rate.retrieve( + token="token", + country="country", + end=parse_date("2019-12-27"), + historical=True, + sort_by="date", + sort_order="asc", + start=parse_date("2019-12-27"), + ) + assert_matches_type(PrimeRateRetrieveResponse, prime_rate, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_raw_response_retrieve(self, client: Brapi) -> None: + response = client.v2.prime_rate.with_raw_response.retrieve() + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + prime_rate = response.parse() + assert_matches_type(PrimeRateRetrieveResponse, prime_rate, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_streaming_response_retrieve(self, client: Brapi) -> None: + with client.v2.prime_rate.with_streaming_response.retrieve() as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + prime_rate = response.parse() + assert_matches_type(PrimeRateRetrieveResponse, prime_rate, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_list_available(self, client: Brapi) -> None: + prime_rate = client.v2.prime_rate.list_available() + assert_matches_type(PrimeRateListAvailableResponse, prime_rate, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_list_available_with_all_params(self, client: Brapi) -> None: + prime_rate = client.v2.prime_rate.list_available( + token="token", + search="search", + ) + assert_matches_type(PrimeRateListAvailableResponse, prime_rate, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_raw_response_list_available(self, client: Brapi) -> None: + response = client.v2.prime_rate.with_raw_response.list_available() + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + prime_rate = response.parse() + 
assert_matches_type(PrimeRateListAvailableResponse, prime_rate, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_streaming_response_list_available(self, client: Brapi) -> None: + with client.v2.prime_rate.with_streaming_response.list_available() as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + prime_rate = response.parse() + assert_matches_type(PrimeRateListAvailableResponse, prime_rate, path=["response"]) + + assert cast(Any, response.is_closed) is True + + +class TestAsyncPrimeRate: + parametrize = pytest.mark.parametrize( + "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"] + ) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_retrieve(self, async_client: AsyncBrapi) -> None: + prime_rate = await async_client.v2.prime_rate.retrieve() + assert_matches_type(PrimeRateRetrieveResponse, prime_rate, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_retrieve_with_all_params(self, async_client: AsyncBrapi) -> None: + prime_rate = await async_client.v2.prime_rate.retrieve( + token="token", + country="country", + end=parse_date("2019-12-27"), + historical=True, + sort_by="date", + sort_order="asc", + start=parse_date("2019-12-27"), + ) + assert_matches_type(PrimeRateRetrieveResponse, prime_rate, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_raw_response_retrieve(self, async_client: AsyncBrapi) -> None: + response = await async_client.v2.prime_rate.with_raw_response.retrieve() + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + prime_rate = await response.parse() + assert_matches_type(PrimeRateRetrieveResponse, prime_rate, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_streaming_response_retrieve(self, async_client: AsyncBrapi) -> None: + async with async_client.v2.prime_rate.with_streaming_response.retrieve() as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + prime_rate = await response.parse() + assert_matches_type(PrimeRateRetrieveResponse, prime_rate, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_list_available(self, async_client: AsyncBrapi) -> None: + prime_rate = await async_client.v2.prime_rate.list_available() + assert_matches_type(PrimeRateListAvailableResponse, prime_rate, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_list_available_with_all_params(self, async_client: AsyncBrapi) -> None: + prime_rate = await async_client.v2.prime_rate.list_available( + token="token", + search="search", + ) + assert_matches_type(PrimeRateListAvailableResponse, prime_rate, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_raw_response_list_available(self, async_client: AsyncBrapi) -> None: + response = await async_client.v2.prime_rate.with_raw_response.list_available() + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + prime_rate = await response.parse() + 
assert_matches_type(PrimeRateListAvailableResponse, prime_rate, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_streaming_response_list_available(self, async_client: AsyncBrapi) -> None: + async with async_client.v2.prime_rate.with_streaming_response.list_available() as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + prime_rate = await response.parse() + assert_matches_type(PrimeRateListAvailableResponse, prime_rate, path=["response"]) + + assert cast(Any, response.is_closed) is True diff --git a/tests/base64.test.ts b/tests/base64.test.ts deleted file mode 100644 index 0f65386..0000000 --- a/tests/base64.test.ts +++ /dev/null @@ -1,80 +0,0 @@ -import { fromBase64, toBase64 } from 'brapi/internal/utils/base64'; - -describe.each(['Buffer', 'atob'])('with %s', (mode) => { - let originalBuffer: BufferConstructor; - beforeAll(() => { - if (mode === 'atob') { - originalBuffer = globalThis.Buffer; - // @ts-expect-error Can't assign undefined to BufferConstructor - delete globalThis.Buffer; - } - }); - afterAll(() => { - if (mode === 'atob') { - globalThis.Buffer = originalBuffer; - } - }); - test('toBase64', () => { - const testCases = [ - { - input: 'hello world', - expected: 'aGVsbG8gd29ybGQ=', - }, - { - input: new Uint8Array([104, 101, 108, 108, 111, 32, 119, 111, 114, 108, 100]), - expected: 'aGVsbG8gd29ybGQ=', - }, - { - input: undefined, - expected: '', - }, - { - input: new Uint8Array([ - 229, 102, 215, 230, 65, 22, 46, 87, 243, 176, 99, 99, 31, 174, 8, 242, 83, 142, 169, 64, 122, 123, - 193, 71, - ]), - expected: '5WbX5kEWLlfzsGNjH64I8lOOqUB6e8FH', - }, - { - input: '✓', - expected: '4pyT', - }, - { - input: new Uint8Array([226, 156, 147]), - expected: '4pyT', - }, - ]; - - testCases.forEach(({ input, expected }) => { - expect(toBase64(input)).toBe(expected); - }); - }); - - test('fromBase64', () => { - const testCases = [ - { - input: 'aGVsbG8gd29ybGQ=', - expected: new Uint8Array([104, 101, 108, 108, 111, 32, 119, 111, 114, 108, 100]), - }, - { - input: '', - expected: new Uint8Array([]), - }, - { - input: '5WbX5kEWLlfzsGNjH64I8lOOqUB6e8FH', - expected: new Uint8Array([ - 229, 102, 215, 230, 65, 22, 46, 87, 243, 176, 99, 99, 31, 174, 8, 242, 83, 142, 169, 64, 122, 123, - 193, 71, - ]), - }, - { - input: '4pyT', - expected: new Uint8Array([226, 156, 147]), - }, - ]; - - testCases.forEach(({ input, expected }) => { - expect(fromBase64(input)).toEqual(expected); - }); - }); -}); diff --git a/tests/buildHeaders.test.ts b/tests/buildHeaders.test.ts deleted file mode 100644 index e33eb0a..0000000 --- a/tests/buildHeaders.test.ts +++ /dev/null @@ -1,88 +0,0 @@ -import { inspect } from 'node:util'; -import { buildHeaders, type HeadersLike, type NullableHeaders } from 'brapi/internal/headers'; - -function inspectNullableHeaders(headers: NullableHeaders) { - return `NullableHeaders {${[ - ...[...headers.values.entries()].map(([name, value]) => ` ${inspect(name)}: ${inspect(value)}`), - ...[...headers.nulls].map((name) => ` ${inspect(name)}: null`), - ].join(', ')} }`; -} - -describe('buildHeaders', () => { - const cases: [HeadersLike[], string][] = [ - [[new Headers({ 'content-type': 'text/plain' })], `NullableHeaders { 'content-type': 'text/plain' }`], - [ - [ - { - 'content-type': 'text/plain', - }, - { - 'Content-Type': undefined, - }, - ], - `NullableHeaders { 'content-type': 'text/plain' }`, - ], - [ - [ - { - 'content-type': 'text/plain', - }, - { - 
'Content-Type': null, - }, - ], - `NullableHeaders { 'content-type': null }`, - ], - [ - [ - { - cookie: 'name1=value1', - Cookie: 'name2=value2', - }, - ], - `NullableHeaders { 'cookie': 'name2=value2' }`, - ], - [ - [ - { - cookie: 'name1=value1', - Cookie: undefined, - }, - ], - `NullableHeaders { 'cookie': 'name1=value1' }`, - ], - [ - [ - { - cookie: ['name1=value1', 'name2=value2'], - }, - ], - `NullableHeaders { 'cookie': 'name1=value1; name2=value2' }`, - ], - [ - [ - { - 'x-foo': ['name1=value1', 'name2=value2'], - }, - ], - `NullableHeaders { 'x-foo': 'name1=value1, name2=value2' }`, - ], - [ - [ - [ - ['cookie', 'name1=value1'], - ['cookie', 'name2=value2'], - ['Cookie', 'name3=value3'], - ], - ], - `NullableHeaders { 'cookie': 'name1=value1; name2=value2; name3=value3' }`, - ], - [[undefined], `NullableHeaders { }`], - [[null], `NullableHeaders { }`], - ]; - for (const [input, expected] of cases) { - test(expected, () => { - expect(inspectNullableHeaders(buildHeaders(input))).toEqual(expected); - }); - } -}); diff --git a/tests/conftest.py b/tests/conftest.py new file mode 100644 index 0000000..a4865b6 --- /dev/null +++ b/tests/conftest.py @@ -0,0 +1,84 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +import os +import logging +from typing import TYPE_CHECKING, Iterator, AsyncIterator + +import httpx +import pytest +from pytest_asyncio import is_async_test + +from brapi import Brapi, AsyncBrapi, DefaultAioHttpClient +from brapi._utils import is_dict + +if TYPE_CHECKING: + from _pytest.fixtures import FixtureRequest # pyright: ignore[reportPrivateImportUsage] + +pytest.register_assert_rewrite("tests.utils") + +logging.getLogger("brapi").setLevel(logging.DEBUG) + + +# automatically add `pytest.mark.asyncio()` to all of our async tests +# so we don't have to add that boilerplate everywhere +def pytest_collection_modifyitems(items: list[pytest.Function]) -> None: + pytest_asyncio_tests = (item for item in items if is_async_test(item)) + session_scope_marker = pytest.mark.asyncio(loop_scope="session") + for async_test in pytest_asyncio_tests: + async_test.add_marker(session_scope_marker, append=False) + + # We skip tests that use both the aiohttp client and respx_mock as respx_mock + # doesn't support custom transports. 
+ for item in items: + if "async_client" not in item.fixturenames or "respx_mock" not in item.fixturenames: + continue + + if not hasattr(item, "callspec"): + continue + + async_client_param = item.callspec.params.get("async_client") + if is_dict(async_client_param) and async_client_param.get("http_client") == "aiohttp": + item.add_marker(pytest.mark.skip(reason="aiohttp client is not compatible with respx_mock")) + + +base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010") + +api_key = "My API Key" + + +@pytest.fixture(scope="session") +def client(request: FixtureRequest) -> Iterator[Brapi]: + strict = getattr(request, "param", True) + if not isinstance(strict, bool): + raise TypeError(f"Unexpected fixture parameter type {type(strict)}, expected {bool}") + + with Brapi(base_url=base_url, api_key=api_key, _strict_response_validation=strict) as client: + yield client + + +@pytest.fixture(scope="session") +async def async_client(request: FixtureRequest) -> AsyncIterator[AsyncBrapi]: + param = getattr(request, "param", True) + + # defaults + strict = True + http_client: None | httpx.AsyncClient = None + + if isinstance(param, bool): + strict = param + elif is_dict(param): + strict = param.get("strict", True) + assert isinstance(strict, bool) + + http_client_type = param.get("http_client", "httpx") + if http_client_type == "aiohttp": + http_client = DefaultAioHttpClient() + else: + raise TypeError(f"Unexpected fixture parameter type {type(param)}, expected bool or dict") + + async with AsyncBrapi( + base_url=base_url, api_key=api_key, _strict_response_validation=strict, http_client=http_client + ) as client: + yield client diff --git a/tests/form.test.ts b/tests/form.test.ts deleted file mode 100644 index f633e17..0000000 --- a/tests/form.test.ts +++ /dev/null @@ -1,85 +0,0 @@ -import { multipartFormRequestOptions, createForm } from 'brapi/internal/uploads'; -import { toFile } from 'brapi/core/uploads'; - -describe('form data validation', () => { - test('valid values do not error', async () => { - await multipartFormRequestOptions( - { - body: { - foo: 'foo', - string: 1, - bool: true, - file: await toFile(Buffer.from('some-content')), - blob: new Blob(['Some content'], { type: 'text/plain' }), - }, - }, - fetch, - ); - }); - - test('null', async () => { - await expect(() => - multipartFormRequestOptions( - { - body: { - null: null, - }, - }, - fetch, - ), - ).rejects.toThrow(TypeError); - }); - - test('undefined is stripped', async () => { - const form = await createForm( - { - foo: undefined, - bar: 'baz', - }, - fetch, - ); - expect(form.has('foo')).toBe(false); - expect(form.get('bar')).toBe('baz'); - }); - - test('nested undefined property is stripped', async () => { - const form = await createForm( - { - bar: { - baz: undefined, - }, - }, - fetch, - ); - expect(Array.from(form.entries())).toEqual([]); - - const form2 = await createForm( - { - bar: { - foo: 'string', - baz: undefined, - }, - }, - fetch, - ); - expect(Array.from(form2.entries())).toEqual([['bar[foo]', 'string']]); - }); - - test('nested undefined array item is stripped', async () => { - const form = await createForm( - { - bar: [undefined, undefined], - }, - fetch, - ); - expect(Array.from(form.entries())).toEqual([]); - - const form2 = await createForm( - { - bar: [undefined, 'foo'], - }, - fetch, - ); - expect(Array.from(form2.entries())).toEqual([['bar[]', 'foo']]); - }); -}); diff --git a/tests/index.test.ts b/tests/index.test.ts deleted file mode 100644 index 72dfe03..0000000 --- a/tests/index.test.ts 
+++ /dev/null @@ -1,735 +0,0 @@ -// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -import { APIPromise } from 'brapi/core/api-promise'; - -import util from 'node:util'; -import Brapi from 'brapi'; -import { APIUserAbortError } from 'brapi'; -const defaultFetch = fetch; - -describe('instantiate client', () => { - const env = process.env; - - beforeEach(() => { - jest.resetModules(); - process.env = { ...env }; - }); - - afterEach(() => { - process.env = env; - }); - - describe('defaultHeaders', () => { - const client = new Brapi({ - baseURL: 'http://localhost:5000/', - defaultHeaders: { 'X-My-Default-Header': '2' }, - apiKey: 'My API Key', - }); - - test('they are used in the request', async () => { - const { req } = await client.buildRequest({ path: '/foo', method: 'post' }); - expect(req.headers.get('x-my-default-header')).toEqual('2'); - }); - - test('can ignore `undefined` and leave the default', async () => { - const { req } = await client.buildRequest({ - path: '/foo', - method: 'post', - headers: { 'X-My-Default-Header': undefined }, - }); - expect(req.headers.get('x-my-default-header')).toEqual('2'); - }); - - test('can be removed with `null`', async () => { - const { req } = await client.buildRequest({ - path: '/foo', - method: 'post', - headers: { 'X-My-Default-Header': null }, - }); - expect(req.headers.has('x-my-default-header')).toBe(false); - }); - }); - describe('logging', () => { - const env = process.env; - - beforeEach(() => { - process.env = { ...env }; - process.env['BRAPI_LOG'] = undefined; - }); - - afterEach(() => { - process.env = env; - }); - - const forceAPIResponseForClient = async (client: Brapi) => { - await new APIPromise( - client, - Promise.resolve({ - response: new Response(), - controller: new AbortController(), - requestLogID: 'log_000000', - retryOfRequestLogID: undefined, - startTime: Date.now(), - options: { - method: 'get', - path: '/', - }, - }), - ); - }; - - test('debug logs when log level is debug', async () => { - const debugMock = jest.fn(); - const logger = { - debug: debugMock, - info: jest.fn(), - warn: jest.fn(), - error: jest.fn(), - }; - - const client = new Brapi({ logger: logger, logLevel: 'debug', apiKey: 'My API Key' }); - - await forceAPIResponseForClient(client); - expect(debugMock).toHaveBeenCalled(); - }); - - test('default logLevel is warn', async () => { - const client = new Brapi({ apiKey: 'My API Key' }); - expect(client.logLevel).toBe('warn'); - }); - - test('debug logs are skipped when log level is info', async () => { - const debugMock = jest.fn(); - const logger = { - debug: debugMock, - info: jest.fn(), - warn: jest.fn(), - error: jest.fn(), - }; - - const client = new Brapi({ logger: logger, logLevel: 'info', apiKey: 'My API Key' }); - - await forceAPIResponseForClient(client); - expect(debugMock).not.toHaveBeenCalled(); - }); - - test('debug logs happen with debug env var', async () => { - const debugMock = jest.fn(); - const logger = { - debug: debugMock, - info: jest.fn(), - warn: jest.fn(), - error: jest.fn(), - }; - - process.env['BRAPI_LOG'] = 'debug'; - const client = new Brapi({ logger: logger, apiKey: 'My API Key' }); - expect(client.logLevel).toBe('debug'); - - await forceAPIResponseForClient(client); - expect(debugMock).toHaveBeenCalled(); - }); - - test('warn when env var level is invalid', async () => { - const warnMock = jest.fn(); - const logger = { - debug: jest.fn(), - info: jest.fn(), - warn: warnMock, - error: jest.fn(), - }; - - process.env['BRAPI_LOG'] = 'not a 
log level'; - const client = new Brapi({ logger: logger, apiKey: 'My API Key' }); - expect(client.logLevel).toBe('warn'); - expect(warnMock).toHaveBeenCalledWith( - 'process.env[\'BRAPI_LOG\'] was set to "not a log level", expected one of ["off","error","warn","info","debug"]', - ); - }); - - test('client log level overrides env var', async () => { - const debugMock = jest.fn(); - const logger = { - debug: debugMock, - info: jest.fn(), - warn: jest.fn(), - error: jest.fn(), - }; - - process.env['BRAPI_LOG'] = 'debug'; - const client = new Brapi({ logger: logger, logLevel: 'off', apiKey: 'My API Key' }); - - await forceAPIResponseForClient(client); - expect(debugMock).not.toHaveBeenCalled(); - }); - - test('no warning logged for invalid env var level + valid client level', async () => { - const warnMock = jest.fn(); - const logger = { - debug: jest.fn(), - info: jest.fn(), - warn: warnMock, - error: jest.fn(), - }; - - process.env['BRAPI_LOG'] = 'not a log level'; - const client = new Brapi({ logger: logger, logLevel: 'debug', apiKey: 'My API Key' }); - expect(client.logLevel).toBe('debug'); - expect(warnMock).not.toHaveBeenCalled(); - }); - }); - - describe('defaultQuery', () => { - test('with null query params given', () => { - const client = new Brapi({ - baseURL: 'http://localhost:5000/', - defaultQuery: { apiVersion: 'foo' }, - apiKey: 'My API Key', - }); - expect(client.buildURL('/foo', null)).toEqual('http://localhost:5000/foo?apiVersion=foo'); - }); - - test('multiple default query params', () => { - const client = new Brapi({ - baseURL: 'http://localhost:5000/', - defaultQuery: { apiVersion: 'foo', hello: 'world' }, - apiKey: 'My API Key', - }); - expect(client.buildURL('/foo', null)).toEqual('http://localhost:5000/foo?apiVersion=foo&hello=world'); - }); - - test('overriding with `undefined`', () => { - const client = new Brapi({ - baseURL: 'http://localhost:5000/', - defaultQuery: { hello: 'world' }, - apiKey: 'My API Key', - }); - expect(client.buildURL('/foo', { hello: undefined })).toEqual('http://localhost:5000/foo'); - }); - }); - - test('custom fetch', async () => { - const client = new Brapi({ - baseURL: 'http://localhost:5000/', - apiKey: 'My API Key', - fetch: (url) => { - return Promise.resolve( - new Response(JSON.stringify({ url, custom: true }), { - headers: { 'Content-Type': 'application/json' }, - }), - ); - }, - }); - - const response = await client.get('/foo'); - expect(response).toEqual({ url: 'http://localhost:5000/foo', custom: true }); - }); - - test('explicit global fetch', async () => { - // make sure the global fetch type is assignable to our Fetch type - const client = new Brapi({ - baseURL: 'http://localhost:5000/', - apiKey: 'My API Key', - fetch: defaultFetch, - }); - }); - - test('custom signal', async () => { - const client = new Brapi({ - baseURL: process.env['TEST_API_BASE_URL'] ?? 
'http://127.0.0.1:4010', - apiKey: 'My API Key', - fetch: (...args) => { - return new Promise((resolve, reject) => - setTimeout( - () => - defaultFetch(...args) - .then(resolve) - .catch(reject), - 300, - ), - ); - }, - }); - - const controller = new AbortController(); - setTimeout(() => controller.abort(), 200); - - const spy = jest.spyOn(client, 'request'); - - await expect(client.get('/foo', { signal: controller.signal })).rejects.toThrowError(APIUserAbortError); - expect(spy).toHaveBeenCalledTimes(1); - }); - - test('normalized method', async () => { - let capturedRequest: RequestInit | undefined; - const testFetch = async (url: string | URL | Request, init: RequestInit = {}): Promise<Response> => { - capturedRequest = init; - return new Response(JSON.stringify({}), { headers: { 'Content-Type': 'application/json' } }); - }; - - const client = new Brapi({ baseURL: 'http://localhost:5000/', apiKey: 'My API Key', fetch: testFetch }); - - await client.patch('/foo'); - expect(capturedRequest?.method).toEqual('PATCH'); - }); - - describe('baseUrl', () => { - test('trailing slash', () => { - const client = new Brapi({ baseURL: 'http://localhost:5000/custom/path/', apiKey: 'My API Key' }); - expect(client.buildURL('/foo', null)).toEqual('http://localhost:5000/custom/path/foo'); - }); - - test('no trailing slash', () => { - const client = new Brapi({ baseURL: 'http://localhost:5000/custom/path', apiKey: 'My API Key' }); - expect(client.buildURL('/foo', null)).toEqual('http://localhost:5000/custom/path/foo'); - }); - - afterEach(() => { - process.env['BRAPI_BASE_URL'] = undefined; - }); - - test('explicit option', () => { - const client = new Brapi({ baseURL: 'https://example.com', apiKey: 'My API Key' }); - expect(client.baseURL).toEqual('https://example.com'); - }); - - test('env variable', () => { - process.env['BRAPI_BASE_URL'] = 'https://example.com/from_env'; - const client = new Brapi({ apiKey: 'My API Key' }); - expect(client.baseURL).toEqual('https://example.com/from_env'); - }); - - test('empty env variable', () => { - process.env['BRAPI_BASE_URL'] = ''; // empty - const client = new Brapi({ apiKey: 'My API Key' }); - expect(client.baseURL).toEqual('https://brapi.dev'); - }); - - test('blank env variable', () => { - process.env['BRAPI_BASE_URL'] = ' '; // blank - const client = new Brapi({ apiKey: 'My API Key' }); - expect(client.baseURL).toEqual('https://brapi.dev'); - }); - - test('env variable with environment', () => { - process.env['BRAPI_BASE_URL'] = 'https://example.com/from_env'; - - expect( - () => new Brapi({ apiKey: 'My API Key', environment: 'production' }), - ).toThrowErrorMatchingInlineSnapshot( - `"Ambiguous URL; The \`baseURL\` option (or BRAPI_BASE_URL env var) and the \`environment\` option are given. 
If you want to use the environment you must pass baseURL: null"`, - ); - - const client = new Brapi({ apiKey: 'My API Key', baseURL: null, environment: 'production' }); - expect(client.baseURL).toEqual('https://brapi.dev'); - }); - - test('in request options', () => { - const client = new Brapi({ apiKey: 'My API Key' }); - expect(client.buildURL('/foo', null, 'http://localhost:5000/option')).toEqual( - 'http://localhost:5000/option/foo', - ); - }); - - test('in request options overridden by client options', () => { - const client = new Brapi({ apiKey: 'My API Key', baseURL: 'http://localhost:5000/client' }); - expect(client.buildURL('/foo', null, 'http://localhost:5000/option')).toEqual( - 'http://localhost:5000/client/foo', - ); - }); - - test('in request options overridden by env variable', () => { - process.env['BRAPI_BASE_URL'] = 'http://localhost:5000/env'; - const client = new Brapi({ apiKey: 'My API Key' }); - expect(client.buildURL('/foo', null, 'http://localhost:5000/option')).toEqual( - 'http://localhost:5000/env/foo', - ); - }); - }); - - test('maxRetries option is correctly set', () => { - const client = new Brapi({ maxRetries: 4, apiKey: 'My API Key' }); - expect(client.maxRetries).toEqual(4); - - // default - const client2 = new Brapi({ apiKey: 'My API Key' }); - expect(client2.maxRetries).toEqual(2); - }); - - describe('withOptions', () => { - test('creates a new client with overridden options', async () => { - const client = new Brapi({ baseURL: 'http://localhost:5000/', maxRetries: 3, apiKey: 'My API Key' }); - - const newClient = client.withOptions({ - maxRetries: 5, - baseURL: 'http://localhost:5001/', - }); - - // Verify the new client has updated options - expect(newClient.maxRetries).toEqual(5); - expect(newClient.baseURL).toEqual('http://localhost:5001/'); - - // Verify the original client is unchanged - expect(client.maxRetries).toEqual(3); - expect(client.baseURL).toEqual('http://localhost:5000/'); - - // Verify it's a different instance - expect(newClient).not.toBe(client); - expect(newClient.constructor).toBe(client.constructor); - }); - - test('inherits options from the parent client', async () => { - const client = new Brapi({ - baseURL: 'http://localhost:5000/', - defaultHeaders: { 'X-Test-Header': 'test-value' }, - defaultQuery: { 'test-param': 'test-value' }, - apiKey: 'My API Key', - }); - - const newClient = client.withOptions({ - baseURL: 'http://localhost:5001/', - }); - - // Test inherited options remain the same - expect(newClient.buildURL('/foo', null)).toEqual('http://localhost:5001/foo?test-param=test-value'); - - const { req } = await newClient.buildRequest({ path: '/foo', method: 'get' }); - expect(req.headers.get('x-test-header')).toEqual('test-value'); - }); - - test('respects runtime property changes when creating new client', () => { - const client = new Brapi({ baseURL: 'http://localhost:5000/', timeout: 1000, apiKey: 'My API Key' }); - - // Modify the client properties directly after creation - client.baseURL = 'http://localhost:6000/'; - client.timeout = 2000; - - // Create a new client with withOptions - const newClient = client.withOptions({ - maxRetries: 10, - }); - - // Verify the new client uses the updated properties, not the original ones - expect(newClient.baseURL).toEqual('http://localhost:6000/'); - expect(newClient.timeout).toEqual(2000); - expect(newClient.maxRetries).toEqual(10); - - // Original client should still have its modified properties - expect(client.baseURL).toEqual('http://localhost:6000/'); - 
expect(client.timeout).toEqual(2000); - expect(client.maxRetries).not.toEqual(10); - - // Verify URL building uses the updated baseURL - expect(newClient.buildURL('/bar', null)).toEqual('http://localhost:6000/bar'); - }); - }); - - test('with environment variable arguments', () => { - // set options via env var - process.env['BRAPI_API_KEY'] = 'My API Key'; - const client = new Brapi(); - expect(client.apiKey).toBe('My API Key'); - }); - - test('with overridden environment variable arguments', () => { - // set options via env var - process.env['BRAPI_API_KEY'] = 'another My API Key'; - const client = new Brapi({ apiKey: 'My API Key' }); - expect(client.apiKey).toBe('My API Key'); - }); -}); - -describe('request building', () => { - const client = new Brapi({ apiKey: 'My API Key' }); - - describe('custom headers', () => { - test('handles undefined', async () => { - const { req } = await client.buildRequest({ - path: '/foo', - method: 'post', - body: { value: 'hello' }, - headers: { 'X-Foo': 'baz', 'x-foo': 'bar', 'x-Foo': undefined, 'x-baz': 'bam', 'X-Baz': null }, - }); - expect(req.headers.get('x-foo')).toEqual('bar'); - expect(req.headers.get('x-Foo')).toEqual('bar'); - expect(req.headers.get('X-Foo')).toEqual('bar'); - expect(req.headers.get('x-baz')).toEqual(null); - }); - }); -}); - -describe('default encoder', () => { - const client = new Brapi({ apiKey: 'My API Key' }); - - class Serializable { - toJSON() { - return { $type: 'Serializable' }; - } - } - class Collection<T> { - #things: T[]; - constructor(things: T[]) { - this.#things = Array.from(things); - } - toJSON() { - return Array.from(this.#things); - } - [Symbol.iterator]() { - return this.#things[Symbol.iterator]; - } - } - for (const jsonValue of [{}, [], { __proto__: null }, new Serializable(), new Collection(['item'])]) { - test(`serializes ${util.inspect(jsonValue)} as json`, async () => { - const { req } = await client.buildRequest({ - path: '/foo', - method: 'post', - body: jsonValue, - }); - expect(req.headers).toBeInstanceOf(Headers); - expect(req.headers.get('content-type')).toEqual('application/json'); - expect(req.body).toBe(JSON.stringify(jsonValue)); - }); - } - - const encoder = new TextEncoder(); - const asyncIterable = (async function* () { - yield encoder.encode('a\n'); - yield encoder.encode('b\n'); - yield encoder.encode('c\n'); - })(); - for (const streamValue of [ - [encoder.encode('a\nb\nc\n')][Symbol.iterator](), - new Response('a\nb\nc\n').body, - asyncIterable, - ]) { - test(`converts ${util.inspect(streamValue)} to ReadableStream`, async () => { - const { req } = await client.buildRequest({ - path: '/foo', - method: 'post', - body: streamValue, - }); - expect(req.headers).toBeInstanceOf(Headers); - expect(req.headers.get('content-type')).toEqual(null); - expect(req.body).toBeInstanceOf(ReadableStream); - expect(await new Response(req.body).text()).toBe('a\nb\nc\n'); - }); - } - - test(`can set content-type for ReadableStream`, async () => { - const { req } = await client.buildRequest({ - path: '/foo', - method: 'post', - body: new Response('a\nb\nc\n').body, - headers: { 'Content-Type': 'text/plain' }, - }); - expect(req.headers).toBeInstanceOf(Headers); - expect(req.headers.get('content-type')).toEqual('text/plain'); - expect(req.body).toBeInstanceOf(ReadableStream); - expect(await new Response(req.body).text()).toBe('a\nb\nc\n'); - }); -}); - -describe('retries', () => { - test('retry on timeout', async () => { - let count = 0; - const testFetch = async ( - url: string | URL | Request, - { signal }: 
RequestInit = {}, - ): Promise => { - if (count++ === 0) { - return new Promise( - (resolve, reject) => signal?.addEventListener('abort', () => reject(new Error('timed out'))), - ); - } - return new Response(JSON.stringify({ a: 1 }), { headers: { 'Content-Type': 'application/json' } }); - }; - - const client = new Brapi({ apiKey: 'My API Key', timeout: 10, fetch: testFetch }); - - expect(await client.request({ path: '/foo', method: 'get' })).toEqual({ a: 1 }); - expect(count).toEqual(2); - expect( - await client - .request({ path: '/foo', method: 'get' }) - .asResponse() - .then((r) => r.text()), - ).toEqual(JSON.stringify({ a: 1 })); - expect(count).toEqual(3); - }); - - test('retry count header', async () => { - let count = 0; - let capturedRequest: RequestInit | undefined; - const testFetch = async (url: string | URL | Request, init: RequestInit = {}): Promise => { - count++; - if (count <= 2) { - return new Response(undefined, { - status: 429, - headers: { - 'Retry-After': '0.1', - }, - }); - } - capturedRequest = init; - return new Response(JSON.stringify({ a: 1 }), { headers: { 'Content-Type': 'application/json' } }); - }; - - const client = new Brapi({ apiKey: 'My API Key', fetch: testFetch, maxRetries: 4 }); - - expect(await client.request({ path: '/foo', method: 'get' })).toEqual({ a: 1 }); - - expect((capturedRequest!.headers as Headers).get('x-stainless-retry-count')).toEqual('2'); - expect(count).toEqual(3); - }); - - test('omit retry count header', async () => { - let count = 0; - let capturedRequest: RequestInit | undefined; - const testFetch = async (url: string | URL | Request, init: RequestInit = {}): Promise => { - count++; - if (count <= 2) { - return new Response(undefined, { - status: 429, - headers: { - 'Retry-After': '0.1', - }, - }); - } - capturedRequest = init; - return new Response(JSON.stringify({ a: 1 }), { headers: { 'Content-Type': 'application/json' } }); - }; - const client = new Brapi({ apiKey: 'My API Key', fetch: testFetch, maxRetries: 4 }); - - expect( - await client.request({ - path: '/foo', - method: 'get', - headers: { 'X-Stainless-Retry-Count': null }, - }), - ).toEqual({ a: 1 }); - - expect((capturedRequest!.headers as Headers).has('x-stainless-retry-count')).toBe(false); - }); - - test('omit retry count header by default', async () => { - let count = 0; - let capturedRequest: RequestInit | undefined; - const testFetch = async (url: string | URL | Request, init: RequestInit = {}): Promise => { - count++; - if (count <= 2) { - return new Response(undefined, { - status: 429, - headers: { - 'Retry-After': '0.1', - }, - }); - } - capturedRequest = init; - return new Response(JSON.stringify({ a: 1 }), { headers: { 'Content-Type': 'application/json' } }); - }; - const client = new Brapi({ - apiKey: 'My API Key', - fetch: testFetch, - maxRetries: 4, - defaultHeaders: { 'X-Stainless-Retry-Count': null }, - }); - - expect( - await client.request({ - path: '/foo', - method: 'get', - }), - ).toEqual({ a: 1 }); - - expect(capturedRequest!.headers as Headers).not.toHaveProperty('x-stainless-retry-count'); - }); - - test('overwrite retry count header', async () => { - let count = 0; - let capturedRequest: RequestInit | undefined; - const testFetch = async (url: string | URL | Request, init: RequestInit = {}): Promise => { - count++; - if (count <= 2) { - return new Response(undefined, { - status: 429, - headers: { - 'Retry-After': '0.1', - }, - }); - } - capturedRequest = init; - return new Response(JSON.stringify({ a: 1 }), { headers: { 'Content-Type': 
'application/json' } }); - }; - const client = new Brapi({ apiKey: 'My API Key', fetch: testFetch, maxRetries: 4 }); - - expect( - await client.request({ - path: '/foo', - method: 'get', - headers: { 'X-Stainless-Retry-Count': '42' }, - }), - ).toEqual({ a: 1 }); - - expect((capturedRequest!.headers as Headers).get('x-stainless-retry-count')).toEqual('42'); - }); - - test('retry on 429 with retry-after', async () => { - let count = 0; - const testFetch = async ( - url: string | URL | Request, - { signal }: RequestInit = {}, - ): Promise => { - if (count++ === 0) { - return new Response(undefined, { - status: 429, - headers: { - 'Retry-After': '0.1', - }, - }); - } - return new Response(JSON.stringify({ a: 1 }), { headers: { 'Content-Type': 'application/json' } }); - }; - - const client = new Brapi({ apiKey: 'My API Key', fetch: testFetch }); - - expect(await client.request({ path: '/foo', method: 'get' })).toEqual({ a: 1 }); - expect(count).toEqual(2); - expect( - await client - .request({ path: '/foo', method: 'get' }) - .asResponse() - .then((r) => r.text()), - ).toEqual(JSON.stringify({ a: 1 })); - expect(count).toEqual(3); - }); - - test('retry on 429 with retry-after-ms', async () => { - let count = 0; - const testFetch = async ( - url: string | URL | Request, - { signal }: RequestInit = {}, - ): Promise => { - if (count++ === 0) { - return new Response(undefined, { - status: 429, - headers: { - 'Retry-After-Ms': '10', - }, - }); - } - return new Response(JSON.stringify({ a: 1 }), { headers: { 'Content-Type': 'application/json' } }); - }; - - const client = new Brapi({ apiKey: 'My API Key', fetch: testFetch }); - - expect(await client.request({ path: '/foo', method: 'get' })).toEqual({ a: 1 }); - expect(count).toEqual(2); - expect( - await client - .request({ path: '/foo', method: 'get' }) - .asResponse() - .then((r) => r.text()), - ).toEqual(JSON.stringify({ a: 1 })); - expect(count).toEqual(3); - }); -}); diff --git a/tests/path.test.ts b/tests/path.test.ts deleted file mode 100644 index 4edb05f..0000000 --- a/tests/path.test.ts +++ /dev/null @@ -1,462 +0,0 @@ -import { createPathTagFunction, encodeURIPath } from 'brapi/internal/utils/path'; -import { inspect } from 'node:util'; -import { runInNewContext } from 'node:vm'; - -describe('path template tag function', () => { - test('validates input', () => { - const testParams = ['', '.', '..', 'x', '%2e', '%2E', '%2e%2e', '%2E%2e', '%2e%2E', '%2E%2E']; - const testCases = [ - ['/path_params/', '/a'], - ['/path_params/', '/'], - ['/path_params/', ''], - ['', '/a'], - ['', '/'], - ['', ''], - ['a'], - [''], - ['/path_params/', ':initiate'], - ['/path_params/', '.json'], - ['/path_params/', '?beta=true'], - ['/path_params/', '.?beta=true'], - ['/path_params/', '/', '/download'], - ['/path_params/', '-', '/download'], - ['/path_params/', '', '/download'], - ['/path_params/', '.', '/download'], - ['/path_params/', '..', '/download'], - ['/plain/path'], - ]; - - function paramPermutations(len: number): string[][] { - if (len === 0) return []; - if (len === 1) return testParams.map((e) => [e]); - const rest = paramPermutations(len - 1); - return testParams.flatMap((e) => rest.map((r) => [e, ...r])); - } - - // We need to test how %2E is handled, so we use a custom encoder that does no escaping. 
- const rawPath = createPathTagFunction((s) => s); - - const emptyObject = {}; - const mathObject = Math; - const numberObject = new Number(); - const stringObject = new String(); - const basicClass = new (class {})(); - const classWithToString = new (class { - toString() { - return 'ok'; - } - })(); - - // Invalid values - expect(() => rawPath`/a/${null}/b`).toThrow( - 'Path parameters result in path with invalid segments:\n' + - 'Value of type Null is not a valid path parameter\n' + - '/a/null/b\n' + - ' ^^^^', - ); - expect(() => rawPath`/a/${undefined}/b`).toThrow( - 'Path parameters result in path with invalid segments:\n' + - 'Value of type Undefined is not a valid path parameter\n' + - '/a/undefined/b\n' + - ' ^^^^^^^^^', - ); - expect(() => rawPath`/a/${emptyObject}/b`).toThrow( - 'Path parameters result in path with invalid segments:\n' + - 'Value of type Object is not a valid path parameter\n' + - '/a/[object Object]/b\n' + - ' ^^^^^^^^^^^^^^^', - ); - expect(() => rawPath`?${mathObject}`).toThrow( - 'Path parameters result in path with invalid segments:\n' + - 'Value of type Math is not a valid path parameter\n' + - '?[object Math]\n' + - ' ^^^^^^^^^^^^^', - ); - expect(() => rawPath`/${basicClass}`).toThrow( - 'Path parameters result in path with invalid segments:\n' + - 'Value of type Object is not a valid path parameter\n' + - '/[object Object]\n' + - ' ^^^^^^^^^^^^^^', - ); - expect(() => rawPath`/../${''}`).toThrow( - 'Path parameters result in path with invalid segments:\n' + - 'Value ".." can\'t be safely passed as a path parameter\n' + - '/../\n' + - ' ^^', - ); - expect(() => rawPath`/../${{}}`).toThrow( - 'Path parameters result in path with invalid segments:\n' + - 'Value ".." can\'t be safely passed as a path parameter\n' + - 'Value of type Object is not a valid path parameter\n' + - '/../[object Object]\n' + - ' ^^ ^^^^^^^^^^^^^^', - ); - - // Valid values - expect(rawPath`/${0}`).toBe('/0'); - expect(rawPath`/${''}`).toBe('/'); - expect(rawPath`/${numberObject}`).toBe('/0'); - expect(rawPath`${stringObject}/`).toBe('/'); - expect(rawPath`/${classWithToString}`).toBe('/ok'); - - // We need to check what happens with cross-realm values, which we might get from - // Jest or other frames in a browser. 
-
-    const newRealm = runInNewContext('globalThis');
-    expect(newRealm.Object).not.toBe(Object);
-
-    const crossRealmObject = newRealm.Object();
-    const crossRealmMathObject = newRealm.Math;
-    const crossRealmNumber = new newRealm.Number();
-    const crossRealmString = new newRealm.String();
-    const crossRealmClass = new (class extends newRealm.Object {})();
-    const crossRealmClassWithToString = new (class extends newRealm.Object {
-      toString() {
-        return 'ok';
-      }
-    })();
-
-    // Invalid cross-realm values
-    expect(() => rawPath`/a/${crossRealmObject}/b`).toThrow(
-      'Path parameters result in path with invalid segments:\n' +
-        'Value of type Object is not a valid path parameter\n' +
-        '/a/[object Object]/b\n' +
-        ' ^^^^^^^^^^^^^^^',
-    );
-    expect(() => rawPath`?${crossRealmMathObject}`).toThrow(
-      'Path parameters result in path with invalid segments:\n' +
-        'Value of type Math is not a valid path parameter\n' +
-        '?[object Math]\n' +
-        ' ^^^^^^^^^^^^^',
-    );
-    expect(() => rawPath`/${crossRealmClass}`).toThrow(
-      'Path parameters result in path with invalid segments:\n' +
-        'Value of type Object is not a valid path parameter\n' +
-        '/[object Object]\n' +
-        ' ^^^^^^^^^^^^^^^',
-    );
-
-    // Valid cross-realm values
-    expect(rawPath`/${crossRealmNumber}`).toBe('/0');
-    expect(rawPath`${crossRealmString}/`).toBe('/');
-    expect(rawPath`/${crossRealmClassWithToString}`).toBe('/ok');
-
-    const results: {
-      [pathParts: string]: {
-        [params: string]: { valid: boolean; result?: string; error?: string };
-      };
-    } = {};
-
-    for (const pathParts of testCases) {
-      const pathResults: Record<string, { valid: boolean; result?: string; error?: string }> = {};
-      results[JSON.stringify(pathParts)] = pathResults;
-      for (const params of paramPermutations(pathParts.length - 1)) {
-        const stringRaw = String.raw({ raw: pathParts }, ...params);
-        const plainString = String.raw(
-          { raw: pathParts.map((e) => e.replace(/\./g, 'x')) },
-          ...params.map((e) => 'X'.repeat(e.length)),
-        );
-        const normalizedStringRaw = new URL(stringRaw, 'https://example.com').href;
-        const normalizedPlainString = new URL(plainString, 'https://example.com').href;
-        const pathResultsKey = JSON.stringify(params);
-        try {
-          const result = rawPath(pathParts, ...params);
-          expect(result).toBe(stringRaw);
-          // there are no special segments, so the length of the normalized path is
-          // equal to the length of the normalized plain path.
-          expect(normalizedStringRaw.length).toBe(normalizedPlainString.length);
-          pathResults[pathResultsKey] = {
-            valid: true,
-            result,
-          };
-        } catch (e) {
-          const error = String(e);
-          expect(error).toMatch(/Path parameters result in path with invalid segment/);
-          // there are special segments, so the length of the normalized path is
-          // different than the length of the normalized plain path.
- expect(normalizedStringRaw.length).not.toBe(normalizedPlainString.length); - pathResults[pathResultsKey] = { - valid: false, - error, - }; - } - } - } - - expect(results).toMatchObject({ - '["/path_params/","/a"]': { - '["x"]': { valid: true, result: '/path_params/x/a' }, - '[""]': { valid: true, result: '/path_params//a' }, - '["%2E%2e"]': { - valid: false, - error: - 'Error: Path parameters result in path with invalid segments:\n' + - 'Value "%2E%2e" can\'t be safely passed as a path parameter\n' + - '/path_params/%2E%2e/a\n' + - ' ^^^^^^', - }, - '["%2E"]': { - valid: false, - error: - 'Error: Path parameters result in path with invalid segments:\n' + - 'Value "%2E" can\'t be safely passed as a path parameter\n' + - '/path_params/%2E/a\n' + - ' ^^^', - }, - }, - '["/path_params/","/"]': { - '["x"]': { valid: true, result: '/path_params/x/' }, - '[""]': { valid: true, result: '/path_params//' }, - '["%2e%2E"]': { - valid: false, - error: - 'Error: Path parameters result in path with invalid segments:\n' + - 'Value "%2e%2E" can\'t be safely passed as a path parameter\n' + - '/path_params/%2e%2E/\n' + - ' ^^^^^^', - }, - '["%2e"]': { - valid: false, - error: - 'Error: Path parameters result in path with invalid segments:\n' + - 'Value "%2e" can\'t be safely passed as a path parameter\n' + - '/path_params/%2e/\n' + - ' ^^^', - }, - }, - '["/path_params/",""]': { - '[""]': { valid: true, result: '/path_params/' }, - '["x"]': { valid: true, result: '/path_params/x' }, - '["%2E"]': { - valid: false, - error: - 'Error: Path parameters result in path with invalid segments:\n' + - 'Value "%2E" can\'t be safely passed as a path parameter\n' + - '/path_params/%2E\n' + - ' ^^^', - }, - '["%2E%2e"]': { - valid: false, - error: - 'Error: Path parameters result in path with invalid segments:\n' + - 'Value "%2E%2e" can\'t be safely passed as a path parameter\n' + - '/path_params/%2E%2e\n' + - ' ^^^^^^', - }, - }, - '["","/a"]': { - '[""]': { valid: true, result: '/a' }, - '["x"]': { valid: true, result: 'x/a' }, - '["%2E"]': { - valid: false, - error: - 'Error: Path parameters result in path with invalid segments:\n' + - 'Value "%2E" can\'t be safely passed as a path parameter\n%2E/a\n^^^', - }, - '["%2e%2E"]': { - valid: false, - error: - 'Error: Path parameters result in path with invalid segments:\n' + - 'Value "%2e%2E" can\'t be safely passed as a path parameter\n' + - '%2e%2E/a\n' + - '^^^^^^', - }, - }, - '["","/"]': { - '["x"]': { valid: true, result: 'x/' }, - '[""]': { valid: true, result: '/' }, - '["%2E%2e"]': { - valid: false, - error: - 'Error: Path parameters result in path with invalid segments:\n' + - 'Value "%2E%2e" can\'t be safely passed as a path parameter\n' + - '%2E%2e/\n' + - '^^^^^^', - }, - '["."]': { - valid: false, - error: - 'Error: Path parameters result in path with invalid segments:\n' + - 'Value "." can\'t be safely passed as a path parameter\n' + - './\n^', - }, - }, - '["",""]': { - '[""]': { valid: true, result: '' }, - '["x"]': { valid: true, result: 'x' }, - '[".."]': { - valid: false, - error: - 'Error: Path parameters result in path with invalid segments:\n' + - 'Value ".." can\'t be safely passed as a path parameter\n' + - '..\n^^', - }, - '["."]': { - valid: false, - error: - 'Error: Path parameters result in path with invalid segments:\n' + - 'Value "." 
can\'t be safely passed as a path parameter\n' + - '.\n^', - }, - }, - '["a"]': {}, - '[""]': {}, - '["/path_params/",":initiate"]': { - '[""]': { valid: true, result: '/path_params/:initiate' }, - '["."]': { valid: true, result: '/path_params/.:initiate' }, - }, - '["/path_params/",".json"]': { - '["x"]': { valid: true, result: '/path_params/x.json' }, - '["."]': { valid: true, result: '/path_params/..json' }, - }, - '["/path_params/","?beta=true"]': { - '["x"]': { valid: true, result: '/path_params/x?beta=true' }, - '[""]': { valid: true, result: '/path_params/?beta=true' }, - '["%2E%2E"]': { - valid: false, - error: - 'Error: Path parameters result in path with invalid segments:\n' + - 'Value "%2E%2E" can\'t be safely passed as a path parameter\n' + - '/path_params/%2E%2E?beta=true\n' + - ' ^^^^^^', - }, - '["%2e%2E"]': { - valid: false, - error: - 'Error: Path parameters result in path with invalid segments:\n' + - 'Value "%2e%2E" can\'t be safely passed as a path parameter\n' + - '/path_params/%2e%2E?beta=true\n' + - ' ^^^^^^', - }, - }, - '["/path_params/",".?beta=true"]': { - '[".."]': { valid: true, result: '/path_params/...?beta=true' }, - '["x"]': { valid: true, result: '/path_params/x.?beta=true' }, - '[""]': { - valid: false, - error: - 'Error: Path parameters result in path with invalid segments:\n' + - 'Value "." can\'t be safely passed as a path parameter\n' + - '/path_params/.?beta=true\n' + - ' ^', - }, - '["%2e"]': { - valid: false, - error: - 'Error: Path parameters result in path with invalid segments:\n' + - 'Value "%2e." can\'t be safely passed as a path parameter\n' + - '/path_params/%2e.?beta=true\n' + - ' ^^^^', - }, - }, - '["/path_params/","/","/download"]': { - '["",""]': { valid: true, result: '/path_params///download' }, - '["","x"]': { valid: true, result: '/path_params//x/download' }, - '[".","%2e"]': { - valid: false, - error: - 'Error: Path parameters result in path with invalid segments:\n' + - 'Value "." can\'t be safely passed as a path parameter\n' + - 'Value "%2e" can\'t be safely passed as a path parameter\n' + - '/path_params/./%2e/download\n' + - ' ^ ^^^', - }, - '["%2E%2e","%2e"]': { - valid: false, - error: - 'Error: Path parameters result in path with invalid segments:\n' + - 'Value "%2E%2e" can\'t be safely passed as a path parameter\n' + - 'Value "%2e" can\'t be safely passed as a path parameter\n' + - '/path_params/%2E%2e/%2e/download\n' + - ' ^^^^^^ ^^^', - }, - }, - '["/path_params/","-","/download"]': { - '["","%2e"]': { valid: true, result: '/path_params/-%2e/download' }, - '["%2E",".."]': { valid: true, result: '/path_params/%2E-../download' }, - }, - '["/path_params/","","/download"]': { - '["%2E%2e","%2e%2E"]': { valid: true, result: '/path_params/%2E%2e%2e%2E/download' }, - '["%2E",".."]': { valid: true, result: '/path_params/%2E../download' }, - '["","%2E"]': { - valid: false, - error: - 'Error: Path parameters result in path with invalid segments:\n' + - 'Value "%2E" can\'t be safely passed as a path parameter\n' + - '/path_params/%2E/download\n' + - ' ^^^', - }, - '["%2E","."]': { - valid: false, - error: - 'Error: Path parameters result in path with invalid segments:\n' + - 'Value "%2E." 
can\'t be safely passed as a path parameter\n' + - '/path_params/%2E./download\n' + - ' ^^^^', - }, - }, - '["/path_params/",".","/download"]': { - '["%2e%2e",""]': { valid: true, result: '/path_params/%2e%2e./download' }, - '["","%2e%2e"]': { valid: true, result: '/path_params/.%2e%2e/download' }, - '["",""]': { - valid: false, - error: - 'Error: Path parameters result in path with invalid segments:\n' + - 'Value "." can\'t be safely passed as a path parameter\n' + - '/path_params/./download\n' + - ' ^', - }, - '["","."]': { - valid: false, - error: - 'Error: Path parameters result in path with invalid segments:\n' + - 'Value ".." can\'t be safely passed as a path parameter\n' + - '/path_params/../download\n' + - ' ^^', - }, - }, - '["/path_params/","..","/download"]': { - '["","%2E"]': { valid: true, result: '/path_params/..%2E/download' }, - '["","x"]': { valid: true, result: '/path_params/..x/download' }, - '["",""]': { - valid: false, - error: - 'Error: Path parameters result in path with invalid segments:\n' + - 'Value ".." can\'t be safely passed as a path parameter\n' + - '/path_params/../download\n' + - ' ^^', - }, - }, - }); - }); -}); - -describe('encodeURIPath', () => { - const testCases: string[] = [ - '', - // Every ASCII character - ...Array.from({ length: 0x7f }, (_, i) => String.fromCharCode(i)), - // Unicode BMP codepoint - 'å', - // Unicode supplementary codepoint - '😃', - ]; - - for (const param of testCases) { - test('properly encodes ' + inspect(param), () => { - const encoded = encodeURIPath(param); - const naiveEncoded = encodeURIComponent(param); - // we should never encode more characters than encodeURIComponent - expect(naiveEncoded.length).toBeGreaterThanOrEqual(encoded.length); - expect(decodeURIComponent(encoded)).toBe(param); - }); - } - - test("leaves ':' intact", () => { - expect(encodeURIPath(':')).toBe(':'); - }); - - test("leaves '@' intact", () => { - expect(encodeURIPath('@')).toBe('@'); - }); -}); diff --git a/tests/qs/empty-keys-cases.ts b/tests/qs/empty-keys-cases.ts deleted file mode 100644 index ea2c1b0..0000000 --- a/tests/qs/empty-keys-cases.ts +++ /dev/null @@ -1,271 +0,0 @@ -export const empty_test_cases = [ - { - input: '&', - with_empty_keys: {}, - stringify_output: { - brackets: '', - indices: '', - repeat: '', - }, - no_empty_keys: {}, - }, - { - input: '&&', - with_empty_keys: {}, - stringify_output: { - brackets: '', - indices: '', - repeat: '', - }, - no_empty_keys: {}, - }, - { - input: '&=', - with_empty_keys: { '': '' }, - stringify_output: { - brackets: '=', - indices: '=', - repeat: '=', - }, - no_empty_keys: {}, - }, - { - input: '&=&', - with_empty_keys: { '': '' }, - stringify_output: { - brackets: '=', - indices: '=', - repeat: '=', - }, - no_empty_keys: {}, - }, - { - input: '&=&=', - with_empty_keys: { '': ['', ''] }, - stringify_output: { - brackets: '[]=&[]=', - indices: '[0]=&[1]=', - repeat: '=&=', - }, - no_empty_keys: {}, - }, - { - input: '&=&=&', - with_empty_keys: { '': ['', ''] }, - stringify_output: { - brackets: '[]=&[]=', - indices: '[0]=&[1]=', - repeat: '=&=', - }, - no_empty_keys: {}, - }, - { - input: '=', - with_empty_keys: { '': '' }, - no_empty_keys: {}, - stringify_output: { - brackets: '=', - indices: '=', - repeat: '=', - }, - }, - { - input: '=&', - with_empty_keys: { '': '' }, - stringify_output: { - brackets: '=', - indices: '=', - repeat: '=', - }, - no_empty_keys: {}, - }, - { - input: '=&&&', - with_empty_keys: { '': '' }, - stringify_output: { - brackets: '=', - indices: '=', - repeat: '=', - }, 
- no_empty_keys: {}, - }, - { - input: '=&=&=&', - with_empty_keys: { '': ['', '', ''] }, - stringify_output: { - brackets: '[]=&[]=&[]=', - indices: '[0]=&[1]=&[2]=', - repeat: '=&=&=', - }, - no_empty_keys: {}, - }, - { - input: '=&a[]=b&a[1]=c', - with_empty_keys: { '': '', a: ['b', 'c'] }, - stringify_output: { - brackets: '=&a[]=b&a[]=c', - indices: '=&a[0]=b&a[1]=c', - repeat: '=&a=b&a=c', - }, - no_empty_keys: { a: ['b', 'c'] }, - }, - { - input: '=a', - with_empty_keys: { '': 'a' }, - no_empty_keys: {}, - stringify_output: { - brackets: '=a', - indices: '=a', - repeat: '=a', - }, - }, - { - input: 'a==a', - with_empty_keys: { a: '=a' }, - no_empty_keys: { a: '=a' }, - stringify_output: { - brackets: 'a==a', - indices: 'a==a', - repeat: 'a==a', - }, - }, - { - input: '=&a[]=b', - with_empty_keys: { '': '', a: ['b'] }, - stringify_output: { - brackets: '=&a[]=b', - indices: '=&a[0]=b', - repeat: '=&a=b', - }, - no_empty_keys: { a: ['b'] }, - }, - { - input: '=&a[]=b&a[]=c&a[2]=d', - with_empty_keys: { '': '', a: ['b', 'c', 'd'] }, - stringify_output: { - brackets: '=&a[]=b&a[]=c&a[]=d', - indices: '=&a[0]=b&a[1]=c&a[2]=d', - repeat: '=&a=b&a=c&a=d', - }, - no_empty_keys: { a: ['b', 'c', 'd'] }, - }, - { - input: '=a&=b', - with_empty_keys: { '': ['a', 'b'] }, - stringify_output: { - brackets: '[]=a&[]=b', - indices: '[0]=a&[1]=b', - repeat: '=a&=b', - }, - no_empty_keys: {}, - }, - { - input: '=a&foo=b', - with_empty_keys: { '': 'a', foo: 'b' }, - no_empty_keys: { foo: 'b' }, - stringify_output: { - brackets: '=a&foo=b', - indices: '=a&foo=b', - repeat: '=a&foo=b', - }, - }, - { - input: 'a[]=b&a=c&=', - with_empty_keys: { '': '', a: ['b', 'c'] }, - stringify_output: { - brackets: '=&a[]=b&a[]=c', - indices: '=&a[0]=b&a[1]=c', - repeat: '=&a=b&a=c', - }, - no_empty_keys: { a: ['b', 'c'] }, - }, - { - input: 'a[]=b&a=c&=', - with_empty_keys: { '': '', a: ['b', 'c'] }, - stringify_output: { - brackets: '=&a[]=b&a[]=c', - indices: '=&a[0]=b&a[1]=c', - repeat: '=&a=b&a=c', - }, - no_empty_keys: { a: ['b', 'c'] }, - }, - { - input: 'a[0]=b&a=c&=', - with_empty_keys: { '': '', a: ['b', 'c'] }, - stringify_output: { - brackets: '=&a[]=b&a[]=c', - indices: '=&a[0]=b&a[1]=c', - repeat: '=&a=b&a=c', - }, - no_empty_keys: { a: ['b', 'c'] }, - }, - { - input: 'a=b&a[]=c&=', - with_empty_keys: { '': '', a: ['b', 'c'] }, - stringify_output: { - brackets: '=&a[]=b&a[]=c', - indices: '=&a[0]=b&a[1]=c', - repeat: '=&a=b&a=c', - }, - no_empty_keys: { a: ['b', 'c'] }, - }, - { - input: 'a=b&a[0]=c&=', - with_empty_keys: { '': '', a: ['b', 'c'] }, - stringify_output: { - brackets: '=&a[]=b&a[]=c', - indices: '=&a[0]=b&a[1]=c', - repeat: '=&a=b&a=c', - }, - no_empty_keys: { a: ['b', 'c'] }, - }, - { - input: '[]=a&[]=b& []=1', - with_empty_keys: { '': ['a', 'b'], ' ': ['1'] }, - stringify_output: { - brackets: '[]=a&[]=b& []=1', - indices: '[0]=a&[1]=b& [0]=1', - repeat: '=a&=b& =1', - }, - no_empty_keys: { 0: 'a', 1: 'b', ' ': ['1'] }, - }, - { - input: '[0]=a&[1]=b&a[0]=1&a[1]=2', - with_empty_keys: { '': ['a', 'b'], a: ['1', '2'] }, - no_empty_keys: { 0: 'a', 1: 'b', a: ['1', '2'] }, - stringify_output: { - brackets: '[]=a&[]=b&a[]=1&a[]=2', - indices: '[0]=a&[1]=b&a[0]=1&a[1]=2', - repeat: '=a&=b&a=1&a=2', - }, - }, - { - input: '[deep]=a&[deep]=2', - with_empty_keys: { '': { deep: ['a', '2'] } }, - stringify_output: { - brackets: '[deep][]=a&[deep][]=2', - indices: '[deep][0]=a&[deep][1]=2', - repeat: '[deep]=a&[deep]=2', - }, - no_empty_keys: { deep: ['a', '2'] }, - }, - { - input: 
'%5B0%5D=a&%5B1%5D=b',
-    with_empty_keys: { '': ['a', 'b'] },
-    stringify_output: {
-      brackets: '[]=a&[]=b',
-      indices: '[0]=a&[1]=b',
-      repeat: '=a&=b',
-    },
-    no_empty_keys: { 0: 'a', 1: 'b' },
-  },
-] satisfies {
-  input: string;
-  with_empty_keys: Record<string, unknown>;
-  stringify_output: {
-    brackets: string;
-    indices: string;
-    repeat: string;
-  };
-  no_empty_keys: Record<string, unknown>;
-}[];
diff --git a/tests/qs/stringify.test.ts b/tests/qs/stringify.test.ts
deleted file mode 100644
index 07baea3..0000000
--- a/tests/qs/stringify.test.ts
+++ /dev/null
@@ -1,2232 +0,0 @@
-import iconv from 'iconv-lite';
-import { stringify } from 'brapi/internal/qs';
-import { encode } from 'brapi/internal/qs/utils';
-import { StringifyOptions } from 'brapi/internal/qs/types';
-import { empty_test_cases } from './empty-keys-cases';
-import assert from 'assert';
-
-describe('stringify()', function () {
-  test('stringifies a querystring object', function () {
-    expect(stringify({ a: 'b' })).toBe('a=b');
-    expect(stringify({ a: 1 })).toBe('a=1');
-    expect(stringify({ a: 1, b: 2 })).toBe('a=1&b=2');
-    expect(stringify({ a: 'A_Z' })).toBe('a=A_Z');
-    expect(stringify({ a: '€' })).toBe('a=%E2%82%AC');
-    expect(stringify({ a: '\ue000' })).toBe('a=%EE%80%80');
-    expect(stringify({ a: 'א' })).toBe('a=%D7%90');
-    expect(stringify({ a: '𐐷' })).toBe('a=%F0%90%90%B7');
-  });
-
-  test('stringifies falsy values', function () {
-    expect(stringify(undefined)).toBe('');
-    expect(stringify(null)).toBe('');
-    expect(stringify(null, { strictNullHandling: true })).toBe('');
-    expect(stringify(false)).toBe('');
-    expect(stringify(0)).toBe('');
-  });
-
-  test('stringifies symbols', function () {
-    expect(stringify(Symbol.iterator)).toBe('');
-    expect(stringify([Symbol.iterator])).toBe('0=Symbol%28Symbol.iterator%29');
-    expect(stringify({ a: Symbol.iterator })).toBe('a=Symbol%28Symbol.iterator%29');
-    expect(stringify({ a: [Symbol.iterator] }, { encodeValuesOnly: true, arrayFormat: 'brackets' })).toBe(
-      'a[]=Symbol%28Symbol.iterator%29',
-    );
-  });
-
-  test('stringifies bigints', function () {
-    var three = BigInt(3);
-    // @ts-expect-error
-    var encodeWithN = function (value, defaultEncoder, charset) {
-      var result = defaultEncoder(value, defaultEncoder, charset);
-      return typeof value === 'bigint' ?
result + 'n' : result; - }; - - expect(stringify(three)).toBe(''); - expect(stringify([three])).toBe('0=3'); - expect(stringify([three], { encoder: encodeWithN })).toBe('0=3n'); - expect(stringify({ a: three })).toBe('a=3'); - expect(stringify({ a: three }, { encoder: encodeWithN })).toBe('a=3n'); - expect(stringify({ a: [three] }, { encodeValuesOnly: true, arrayFormat: 'brackets' })).toBe('a[]=3'); - expect( - stringify({ a: [three] }, { encodeValuesOnly: true, encoder: encodeWithN, arrayFormat: 'brackets' }), - ).toBe('a[]=3n'); - }); - - test('encodes dot in key of object when encodeDotInKeys and allowDots is provided', function () { - expect( - stringify({ 'name.obj': { first: 'John', last: 'Doe' } }, { allowDots: false, encodeDotInKeys: false }), - ).toBe('name.obj%5Bfirst%5D=John&name.obj%5Blast%5D=Doe'); - expect( - stringify({ 'name.obj': { first: 'John', last: 'Doe' } }, { allowDots: true, encodeDotInKeys: false }), - ).toBe('name.obj.first=John&name.obj.last=Doe'); - expect( - stringify({ 'name.obj': { first: 'John', last: 'Doe' } }, { allowDots: false, encodeDotInKeys: true }), - ).toBe('name%252Eobj%5Bfirst%5D=John&name%252Eobj%5Blast%5D=Doe'); - expect( - stringify({ 'name.obj': { first: 'John', last: 'Doe' } }, { allowDots: true, encodeDotInKeys: true }), - ).toBe('name%252Eobj.first=John&name%252Eobj.last=Doe'); - - // st.equal( - // stringify( - // { 'name.obj.subobject': { 'first.godly.name': 'John', last: 'Doe' } }, - // { allowDots: false, encodeDotInKeys: false }, - // ), - // 'name.obj.subobject%5Bfirst.godly.name%5D=John&name.obj.subobject%5Blast%5D=Doe', - // 'with allowDots false and encodeDotInKeys false', - // ); - // st.equal( - // stringify( - // { 'name.obj.subobject': { 'first.godly.name': 'John', last: 'Doe' } }, - // { allowDots: true, encodeDotInKeys: false }, - // ), - // 'name.obj.subobject.first.godly.name=John&name.obj.subobject.last=Doe', - // 'with allowDots false and encodeDotInKeys false', - // ); - // st.equal( - // stringify( - // { 'name.obj.subobject': { 'first.godly.name': 'John', last: 'Doe' } }, - // { allowDots: false, encodeDotInKeys: true }, - // ), - // 'name%252Eobj%252Esubobject%5Bfirst.godly.name%5D=John&name%252Eobj%252Esubobject%5Blast%5D=Doe', - // 'with allowDots false and encodeDotInKeys true', - // ); - // st.equal( - // stringify( - // { 'name.obj.subobject': { 'first.godly.name': 'John', last: 'Doe' } }, - // { allowDots: true, encodeDotInKeys: true }, - // ), - // 'name%252Eobj%252Esubobject.first%252Egodly%252Ename=John&name%252Eobj%252Esubobject.last=Doe', - // 'with allowDots true and encodeDotInKeys true', - // ); - expect( - stringify( - { 'name.obj.subobject': { 'first.godly.name': 'John', last: 'Doe' } }, - { allowDots: false, encodeDotInKeys: false }, - ), - ).toBe('name.obj.subobject%5Bfirst.godly.name%5D=John&name.obj.subobject%5Blast%5D=Doe'); - expect( - stringify( - { 'name.obj.subobject': { 'first.godly.name': 'John', last: 'Doe' } }, - { allowDots: true, encodeDotInKeys: false }, - ), - ).toBe('name.obj.subobject.first.godly.name=John&name.obj.subobject.last=Doe'); - expect( - stringify( - { 'name.obj.subobject': { 'first.godly.name': 'John', last: 'Doe' } }, - { allowDots: false, encodeDotInKeys: true }, - ), - ).toBe('name%252Eobj%252Esubobject%5Bfirst.godly.name%5D=John&name%252Eobj%252Esubobject%5Blast%5D=Doe'); - expect( - stringify( - { 'name.obj.subobject': { 'first.godly.name': 'John', last: 'Doe' } }, - { allowDots: true, encodeDotInKeys: true }, - ), - 
).toBe('name%252Eobj%252Esubobject.first%252Egodly%252Ename=John&name%252Eobj%252Esubobject.last=Doe'); - }); - - test('should encode dot in key of object, and automatically set allowDots to `true` when encodeDotInKeys is true and allowDots in undefined', function () { - // st.equal( - // stringify( - // { 'name.obj.subobject': { 'first.godly.name': 'John', last: 'Doe' } }, - // { encodeDotInKeys: true }, - // ), - // 'name%252Eobj%252Esubobject.first%252Egodly%252Ename=John&name%252Eobj%252Esubobject.last=Doe', - // 'with allowDots undefined and encodeDotInKeys true', - // ); - expect( - stringify( - { 'name.obj.subobject': { 'first.godly.name': 'John', last: 'Doe' } }, - { encodeDotInKeys: true }, - ), - ).toBe('name%252Eobj%252Esubobject.first%252Egodly%252Ename=John&name%252Eobj%252Esubobject.last=Doe'); - }); - - test('should encode dot in key of object when encodeDotInKeys and allowDots is provided, and nothing else when encodeValuesOnly is provided', function () { - // st.equal( - // stringify( - // { 'name.obj': { first: 'John', last: 'Doe' } }, - // { - // encodeDotInKeys: true, - // allowDots: true, - // encodeValuesOnly: true, - // }, - // ), - // 'name%2Eobj.first=John&name%2Eobj.last=Doe', - // ); - expect( - stringify( - { 'name.obj': { first: 'John', last: 'Doe' } }, - { - encodeDotInKeys: true, - allowDots: true, - encodeValuesOnly: true, - }, - ), - ).toBe('name%2Eobj.first=John&name%2Eobj.last=Doe'); - - // st.equal( - // stringify( - // { 'name.obj.subobject': { 'first.godly.name': 'John', last: 'Doe' } }, - // { allowDots: true, encodeDotInKeys: true, encodeValuesOnly: true }, - // ), - // 'name%2Eobj%2Esubobject.first%2Egodly%2Ename=John&name%2Eobj%2Esubobject.last=Doe', - // ); - expect( - stringify( - { 'name.obj.subobject': { 'first.godly.name': 'John', last: 'Doe' } }, - { allowDots: true, encodeDotInKeys: true, encodeValuesOnly: true }, - ), - ).toBe('name%2Eobj%2Esubobject.first%2Egodly%2Ename=John&name%2Eobj%2Esubobject.last=Doe'); - }); - - test('throws when `commaRoundTrip` is not a boolean', function () { - // st['throws']( - // function () { - // stringify({}, { commaRoundTrip: 'not a boolean' }); - // }, - // TypeError, - // 'throws when `commaRoundTrip` is not a boolean', - // ); - expect(() => { - // @ts-expect-error - stringify({}, { commaRoundTrip: 'not a boolean' }); - }).toThrow(TypeError); - }); - - test('throws when `encodeDotInKeys` is not a boolean', function () { - // st['throws'](function () { - // stringify({ a: [], b: 'zz' }, { encodeDotInKeys: 'foobar' }); - // }, TypeError); - expect(() => { - // @ts-expect-error - stringify({ a: [], b: 'zz' }, { encodeDotInKeys: 'foobar' }); - }).toThrow(TypeError); - - // st['throws'](function () { - // stringify({ a: [], b: 'zz' }, { encodeDotInKeys: 0 }); - // }, TypeError); - expect(() => { - // @ts-expect-error - stringify({ a: [], b: 'zz' }, { encodeDotInKeys: 0 }); - }).toThrow(TypeError); - - // st['throws'](function () { - // stringify({ a: [], b: 'zz' }, { encodeDotInKeys: NaN }); - // }, TypeError); - expect(() => { - // @ts-expect-error - stringify({ a: [], b: 'zz' }, { encodeDotInKeys: NaN }); - }).toThrow(TypeError); - - // st['throws'](function () { - // stringify({ a: [], b: 'zz' }, { encodeDotInKeys: null }); - // }, TypeError); - expect(() => { - // @ts-expect-error - stringify({ a: [], b: 'zz' }, { encodeDotInKeys: null }); - }).toThrow(TypeError); - }); - - test('adds query prefix', function () { - // st.equal(stringify({ a: 'b' }, { addQueryPrefix: true }), '?a=b'); - expect(stringify({ 
a: 'b' }, { addQueryPrefix: true })).toBe('?a=b'); - }); - - test('with query prefix, outputs blank string given an empty object', function () { - // st.equal(stringify({}, { addQueryPrefix: true }), ''); - expect(stringify({}, { addQueryPrefix: true })).toBe(''); - }); - - test('stringifies nested falsy values', function () { - // st.equal(stringify({ a: { b: { c: null } } }), 'a%5Bb%5D%5Bc%5D='); - // st.equal( - // stringify({ a: { b: { c: null } } }, { strictNullHandling: true }), - // 'a%5Bb%5D%5Bc%5D', - // ); - // st.equal(stringify({ a: { b: { c: false } } }), 'a%5Bb%5D%5Bc%5D=false'); - expect(stringify({ a: { b: { c: null } } })).toBe('a%5Bb%5D%5Bc%5D='); - expect(stringify({ a: { b: { c: null } } }, { strictNullHandling: true })).toBe('a%5Bb%5D%5Bc%5D'); - expect(stringify({ a: { b: { c: false } } })).toBe('a%5Bb%5D%5Bc%5D=false'); - }); - - test('stringifies a nested object', function () { - // st.equal(stringify({ a: { b: 'c' } }), 'a%5Bb%5D=c'); - // st.equal(stringify({ a: { b: { c: { d: 'e' } } } }), 'a%5Bb%5D%5Bc%5D%5Bd%5D=e'); - expect(stringify({ a: { b: 'c' } })).toBe('a%5Bb%5D=c'); - expect(stringify({ a: { b: { c: { d: 'e' } } } })).toBe('a%5Bb%5D%5Bc%5D%5Bd%5D=e'); - }); - - test('`allowDots` option: stringifies a nested object with dots notation', function () { - // st.equal(stringify({ a: { b: 'c' } }, { allowDots: true }), 'a.b=c'); - // st.equal(stringify({ a: { b: { c: { d: 'e' } } } }, { allowDots: true }), 'a.b.c.d=e'); - expect(stringify({ a: { b: 'c' } }, { allowDots: true })).toBe('a.b=c'); - expect(stringify({ a: { b: { c: { d: 'e' } } } }, { allowDots: true })).toBe('a.b.c.d=e'); - }); - - test('stringifies an array value', function () { - // st.equal( - // stringify({ a: ['b', 'c', 'd'] }, { arrayFormat: 'indices' }), - // 'a%5B0%5D=b&a%5B1%5D=c&a%5B2%5D=d', - // 'indices => indices', - // ); - // st.equal( - // stringify({ a: ['b', 'c', 'd'] }, { arrayFormat: 'brackets' }), - // 'a%5B%5D=b&a%5B%5D=c&a%5B%5D=d', - // 'brackets => brackets', - // ); - // st.equal( - // stringify({ a: ['b', 'c', 'd'] }, { arrayFormat: 'comma' }), - // 'a=b%2Cc%2Cd', - // 'comma => comma', - // ); - // st.equal( - // stringify({ a: ['b', 'c', 'd'] }, { arrayFormat: 'comma', commaRoundTrip: true }), - // 'a=b%2Cc%2Cd', - // 'comma round trip => comma', - // ); - // st.equal( - // stringify({ a: ['b', 'c', 'd'] }), - // 'a%5B0%5D=b&a%5B1%5D=c&a%5B2%5D=d', - // 'default => indices', - // ); - expect(stringify({ a: ['b', 'c', 'd'] }, { arrayFormat: 'indices' })).toBe( - 'a%5B0%5D=b&a%5B1%5D=c&a%5B2%5D=d', - ); - expect(stringify({ a: ['b', 'c', 'd'] }, { arrayFormat: 'brackets' })).toBe( - 'a%5B%5D=b&a%5B%5D=c&a%5B%5D=d', - ); - expect(stringify({ a: ['b', 'c', 'd'] }, { arrayFormat: 'comma' })).toBe('a=b%2Cc%2Cd'); - expect(stringify({ a: ['b', 'c', 'd'] }, { arrayFormat: 'comma', commaRoundTrip: true })).toBe( - 'a=b%2Cc%2Cd', - ); - expect(stringify({ a: ['b', 'c', 'd'] })).toBe('a%5B0%5D=b&a%5B1%5D=c&a%5B2%5D=d'); - }); - - test('`skipNulls` option', function () { - // st.equal( - // stringify({ a: 'b', c: null }, { skipNulls: true }), - // 'a=b', - // 'omits nulls when asked', - // ); - expect(stringify({ a: 'b', c: null }, { skipNulls: true })).toBe('a=b'); - - // st.equal( - // stringify({ a: { b: 'c', d: null } }, { skipNulls: true }), - // 'a%5Bb%5D=c', - // 'omits nested nulls when asked', - // ); - expect(stringify({ a: { b: 'c', d: null } }, { skipNulls: true })).toBe('a%5Bb%5D=c'); - }); - - test('omits array indices when asked', function () { - // 
st.equal(stringify({ a: ['b', 'c', 'd'] }, { indices: false }), 'a=b&a=c&a=d'); - expect(stringify({ a: ['b', 'c', 'd'] }, { indices: false })).toBe('a=b&a=c&a=d'); - }); - - test('omits object key/value pair when value is empty array', function () { - // st.equal(stringify({ a: [], b: 'zz' }), 'b=zz'); - expect(stringify({ a: [], b: 'zz' })).toBe('b=zz'); - }); - - test('should not omit object key/value pair when value is empty array and when asked', function () { - // st.equal(stringify({ a: [], b: 'zz' }), 'b=zz'); - // st.equal(stringify({ a: [], b: 'zz' }, { allowEmptyArrays: false }), 'b=zz'); - // st.equal(stringify({ a: [], b: 'zz' }, { allowEmptyArrays: true }), 'a[]&b=zz'); - expect(stringify({ a: [], b: 'zz' })).toBe('b=zz'); - expect(stringify({ a: [], b: 'zz' }, { allowEmptyArrays: false })).toBe('b=zz'); - expect(stringify({ a: [], b: 'zz' }, { allowEmptyArrays: true })).toBe('a[]&b=zz'); - }); - - test('should throw when allowEmptyArrays is not of type boolean', function () { - // st['throws'](function () { - // stringify({ a: [], b: 'zz' }, { allowEmptyArrays: 'foobar' }); - // }, TypeError); - expect(() => { - // @ts-expect-error - stringify({ a: [], b: 'zz' }, { allowEmptyArrays: 'foobar' }); - }).toThrow(TypeError); - - // st['throws'](function () { - // stringify({ a: [], b: 'zz' }, { allowEmptyArrays: 0 }); - // }, TypeError); - expect(() => { - // @ts-expect-error - stringify({ a: [], b: 'zz' }, { allowEmptyArrays: 0 }); - }).toThrow(TypeError); - - // st['throws'](function () { - // stringify({ a: [], b: 'zz' }, { allowEmptyArrays: NaN }); - // }, TypeError); - expect(() => { - // @ts-expect-error - stringify({ a: [], b: 'zz' }, { allowEmptyArrays: NaN }); - }).toThrow(TypeError); - - // st['throws'](function () { - // stringify({ a: [], b: 'zz' }, { allowEmptyArrays: null }); - // }, TypeError); - expect(() => { - // @ts-expect-error - stringify({ a: [], b: 'zz' }, { allowEmptyArrays: null }); - }).toThrow(TypeError); - }); - - test('allowEmptyArrays + strictNullHandling', function () { - // st.equal( - // stringify({ testEmptyArray: [] }, { strictNullHandling: true, allowEmptyArrays: true }), - // 'testEmptyArray[]', - // ); - expect(stringify({ testEmptyArray: [] }, { strictNullHandling: true, allowEmptyArrays: true })).toBe( - 'testEmptyArray[]', - ); - }); - - describe('stringifies an array value with one item vs multiple items', function () { - test('non-array item', function () { - // s2t.equal( - // stringify({ a: 'c' }, { encodeValuesOnly: true, arrayFormat: 'indices' }), - // 'a=c', - // ); - // s2t.equal( - // stringify({ a: 'c' }, { encodeValuesOnly: true, arrayFormat: 'brackets' }), - // 'a=c', - // ); - // s2t.equal(stringify({ a: 'c' }, { encodeValuesOnly: true, arrayFormat: 'comma' }), 'a=c'); - // s2t.equal(stringify({ a: 'c' }, { encodeValuesOnly: true }), 'a=c'); - expect(stringify({ a: 'c' }, { encodeValuesOnly: true, arrayFormat: 'indices' })).toBe('a=c'); - expect(stringify({ a: 'c' }, { encodeValuesOnly: true, arrayFormat: 'brackets' })).toBe('a=c'); - expect(stringify({ a: 'c' }, { encodeValuesOnly: true, arrayFormat: 'comma' })).toBe('a=c'); - expect(stringify({ a: 'c' }, { encodeValuesOnly: true })).toBe('a=c'); - }); - - test('array with a single item', function () { - // s2t.equal( - // stringify({ a: ['c'] }, { encodeValuesOnly: true, arrayFormat: 'indices' }), - // 'a[0]=c', - // ); - // s2t.equal( - // stringify({ a: ['c'] }, { encodeValuesOnly: true, arrayFormat: 'brackets' }), - // 'a[]=c', - // ); - // s2t.equal( - // stringify({ a: 
['c'] }, { encodeValuesOnly: true, arrayFormat: 'comma' }), - // 'a=c', - // ); - // s2t.equal( - // stringify( - // { a: ['c'] }, - // { encodeValuesOnly: true, arrayFormat: 'comma', commaRoundTrip: true }, - // ), - // 'a[]=c', - // ); // so it parses back as an array - // s2t.equal(stringify({ a: ['c'] }, { encodeValuesOnly: true }), 'a[0]=c'); - expect(stringify({ a: ['c'] }, { encodeValuesOnly: true, arrayFormat: 'indices' })).toBe('a[0]=c'); - expect(stringify({ a: ['c'] }, { encodeValuesOnly: true, arrayFormat: 'brackets' })).toBe('a[]=c'); - expect(stringify({ a: ['c'] }, { encodeValuesOnly: true, arrayFormat: 'comma' })).toBe('a=c'); - expect( - stringify({ a: ['c'] }, { encodeValuesOnly: true, arrayFormat: 'comma', commaRoundTrip: true }), - ).toBe('a[]=c'); - expect(stringify({ a: ['c'] }, { encodeValuesOnly: true })).toBe('a[0]=c'); - }); - - test('array with multiple items', function () { - // s2t.equal( - // stringify({ a: ['c', 'd'] }, { encodeValuesOnly: true, arrayFormat: 'indices' }), - // 'a[0]=c&a[1]=d', - // ); - // s2t.equal( - // stringify({ a: ['c', 'd'] }, { encodeValuesOnly: true, arrayFormat: 'brackets' }), - // 'a[]=c&a[]=d', - // ); - // s2t.equal( - // stringify({ a: ['c', 'd'] }, { encodeValuesOnly: true, arrayFormat: 'comma' }), - // 'a=c,d', - // ); - // s2t.equal( - // stringify( - // { a: ['c', 'd'] }, - // { encodeValuesOnly: true, arrayFormat: 'comma', commaRoundTrip: true }, - // ), - // 'a=c,d', - // ); - // s2t.equal(stringify({ a: ['c', 'd'] }, { encodeValuesOnly: true }), 'a[0]=c&a[1]=d'); - expect(stringify({ a: ['c', 'd'] }, { encodeValuesOnly: true, arrayFormat: 'indices' })).toBe( - 'a[0]=c&a[1]=d', - ); - expect(stringify({ a: ['c', 'd'] }, { encodeValuesOnly: true, arrayFormat: 'brackets' })).toBe( - 'a[]=c&a[]=d', - ); - expect(stringify({ a: ['c', 'd'] }, { encodeValuesOnly: true, arrayFormat: 'comma' })).toBe('a=c,d'); - expect( - stringify({ a: ['c', 'd'] }, { encodeValuesOnly: true, arrayFormat: 'comma', commaRoundTrip: true }), - ).toBe('a=c,d'); - expect(stringify({ a: ['c', 'd'] }, { encodeValuesOnly: true })).toBe('a[0]=c&a[1]=d'); - }); - - test('array with multiple items with a comma inside', function () { - // s2t.equal( - // stringify({ a: ['c,d', 'e'] }, { encodeValuesOnly: true, arrayFormat: 'comma' }), - // 'a=c%2Cd,e', - // ); - // s2t.equal(stringify({ a: ['c,d', 'e'] }, { arrayFormat: 'comma' }), 'a=c%2Cd%2Ce'); - expect(stringify({ a: ['c,d', 'e'] }, { encodeValuesOnly: true, arrayFormat: 'comma' })).toBe( - 'a=c%2Cd,e', - ); - expect(stringify({ a: ['c,d', 'e'] }, { arrayFormat: 'comma' })).toBe('a=c%2Cd%2Ce'); - - // s2t.equal( - // stringify( - // { a: ['c,d', 'e'] }, - // { encodeValuesOnly: true, arrayFormat: 'comma', commaRoundTrip: true }, - // ), - // 'a=c%2Cd,e', - // ); - // s2t.equal( - // stringify({ a: ['c,d', 'e'] }, { arrayFormat: 'comma', commaRoundTrip: true }), - // 'a=c%2Cd%2Ce', - // ); - expect( - stringify( - { a: ['c,d', 'e'] }, - { encodeValuesOnly: true, arrayFormat: 'comma', commaRoundTrip: true }, - ), - ).toBe('a=c%2Cd,e'); - expect(stringify({ a: ['c,d', 'e'] }, { arrayFormat: 'comma', commaRoundTrip: true })).toBe( - 'a=c%2Cd%2Ce', - ); - }); - }); - - test('stringifies a nested array value', function () { - expect(stringify({ a: { b: ['c', 'd'] } }, { encodeValuesOnly: true, arrayFormat: 'indices' })).toBe( - 'a[b][0]=c&a[b][1]=d', - ); - expect(stringify({ a: { b: ['c', 'd'] } }, { encodeValuesOnly: true, arrayFormat: 'brackets' })).toBe( - 'a[b][]=c&a[b][]=d', - ); - expect(stringify({ a: { 
b: ['c', 'd'] } }, { encodeValuesOnly: true, arrayFormat: 'comma' })).toBe( - 'a[b]=c,d', - ); - expect(stringify({ a: { b: ['c', 'd'] } }, { encodeValuesOnly: true })).toBe('a[b][0]=c&a[b][1]=d'); - }); - - test('stringifies comma and empty array values', function () { - // st.equal( - // stringify({ a: [',', '', 'c,d%'] }, { encode: false, arrayFormat: 'indices' }), - // 'a[0]=,&a[1]=&a[2]=c,d%', - // ); - // st.equal( - // stringify({ a: [',', '', 'c,d%'] }, { encode: false, arrayFormat: 'brackets' }), - // 'a[]=,&a[]=&a[]=c,d%', - // ); - // st.equal( - // stringify({ a: [',', '', 'c,d%'] }, { encode: false, arrayFormat: 'comma' }), - // 'a=,,,c,d%', - // ); - // st.equal( - // stringify({ a: [',', '', 'c,d%'] }, { encode: false, arrayFormat: 'repeat' }), - // 'a=,&a=&a=c,d%', - // ); - expect(stringify({ a: [',', '', 'c,d%'] }, { encode: false, arrayFormat: 'indices' })).toBe( - 'a[0]=,&a[1]=&a[2]=c,d%', - ); - expect(stringify({ a: [',', '', 'c,d%'] }, { encode: false, arrayFormat: 'brackets' })).toBe( - 'a[]=,&a[]=&a[]=c,d%', - ); - expect(stringify({ a: [',', '', 'c,d%'] }, { encode: false, arrayFormat: 'comma' })).toBe('a=,,,c,d%'); - expect(stringify({ a: [',', '', 'c,d%'] }, { encode: false, arrayFormat: 'repeat' })).toBe( - 'a=,&a=&a=c,d%', - ); - - // st.equal( - // stringify( - // { a: [',', '', 'c,d%'] }, - // { encode: true, encodeValuesOnly: true, arrayFormat: 'indices' }, - // ), - // 'a[0]=%2C&a[1]=&a[2]=c%2Cd%25', - // ); - // st.equal( - // stringify( - // { a: [',', '', 'c,d%'] }, - // { encode: true, encodeValuesOnly: true, arrayFormat: 'brackets' }, - // ), - // 'a[]=%2C&a[]=&a[]=c%2Cd%25', - // ); - // st.equal( - // stringify( - // { a: [',', '', 'c,d%'] }, - // { encode: true, encodeValuesOnly: true, arrayFormat: 'comma' }, - // ), - // 'a=%2C,,c%2Cd%25', - // ); - // st.equal( - // stringify( - // { a: [',', '', 'c,d%'] }, - // { encode: true, encodeValuesOnly: true, arrayFormat: 'repeat' }, - // ), - // 'a=%2C&a=&a=c%2Cd%25', - // ); - expect( - stringify({ a: [',', '', 'c,d%'] }, { encode: true, encodeValuesOnly: false, arrayFormat: 'indices' }), - ).toBe('a%5B0%5D=%2C&a%5B1%5D=&a%5B2%5D=c%2Cd%25'); - expect( - stringify({ a: [',', '', 'c,d%'] }, { encode: true, encodeValuesOnly: true, arrayFormat: 'brackets' }), - ).toBe('a[]=%2C&a[]=&a[]=c%2Cd%25'); - expect( - stringify({ a: [',', '', 'c,d%'] }, { encode: true, encodeValuesOnly: false, arrayFormat: 'comma' }), - ).toBe('a=%2C%2C%2Cc%2Cd%25'); - expect( - stringify({ a: [',', '', 'c,d%'] }, { encode: true, encodeValuesOnly: false, arrayFormat: 'repeat' }), - ).toBe('a=%2C&a=&a=c%2Cd%25'); - - // st.equal( - // stringify( - // { a: [',', '', 'c,d%'] }, - // { encode: true, encodeValuesOnly: false, arrayFormat: 'indices' }, - // ), - // 'a%5B0%5D=%2C&a%5B1%5D=&a%5B2%5D=c%2Cd%25', - // ); - // st.equal( - // stringify( - // { a: [',', '', 'c,d%'] }, - // { encode: true, encodeValuesOnly: false, arrayFormat: 'brackets' }, - // ), - // 'a%5B%5D=%2C&a%5B%5D=&a%5B%5D=c%2Cd%25', - // ); - // st.equal( - // stringify( - // { a: [',', '', 'c,d%'] }, - // { encode: true, encodeValuesOnly: false, arrayFormat: 'comma' }, - // ), - // 'a=%2C%2C%2Cc%2Cd%25', - // ); - // st.equal( - // stringify( - // { a: [',', '', 'c,d%'] }, - // { encode: true, encodeValuesOnly: false, arrayFormat: 'repeat' }, - // ), - // 'a=%2C&a=&a=c%2Cd%25', - // ); - expect( - stringify({ a: [',', '', 'c,d%'] }, { encode: true, encodeValuesOnly: false, arrayFormat: 'repeat' }), - ).toBe('a=%2C&a=&a=c%2Cd%25'); - expect( - stringify({ a: [',', '', 
'c,d%'] }, { encode: true, encodeValuesOnly: false, arrayFormat: 'indices' }), - ).toBe('a%5B0%5D=%2C&a%5B1%5D=&a%5B2%5D=c%2Cd%25'); - expect( - stringify({ a: [',', '', 'c,d%'] }, { encode: true, encodeValuesOnly: true, arrayFormat: 'brackets' }), - ).toBe('a[]=%2C&a[]=&a[]=c%2Cd%25'); - expect( - stringify({ a: [',', '', 'c,d%'] }, { encode: true, encodeValuesOnly: false, arrayFormat: 'comma' }), - ).toBe('a=%2C%2C%2Cc%2Cd%25'); - expect( - stringify({ a: [',', '', 'c,d%'] }, { encode: true, encodeValuesOnly: false, arrayFormat: 'repeat' }), - ).toBe('a=%2C&a=&a=c%2Cd%25'); - }); - - test('stringifies comma and empty non-array values', function () { - // st.equal( - // stringify({ a: ',', b: '', c: 'c,d%' }, { encode: false, arrayFormat: 'indices' }), - // 'a=,&b=&c=c,d%', - // ); - // st.equal( - // stringify({ a: ',', b: '', c: 'c,d%' }, { encode: false, arrayFormat: 'brackets' }), - // 'a=,&b=&c=c,d%', - // ); - // st.equal( - // stringify({ a: ',', b: '', c: 'c,d%' }, { encode: false, arrayFormat: 'comma' }), - // 'a=,&b=&c=c,d%', - // ); - // st.equal( - // stringify({ a: ',', b: '', c: 'c,d%' }, { encode: false, arrayFormat: 'repeat' }), - // 'a=,&b=&c=c,d%', - // ); - expect(stringify({ a: ',', b: '', c: 'c,d%' }, { encode: false, arrayFormat: 'indices' })).toBe( - 'a=,&b=&c=c,d%', - ); - expect(stringify({ a: ',', b: '', c: 'c,d%' }, { encode: false, arrayFormat: 'brackets' })).toBe( - 'a=,&b=&c=c,d%', - ); - - // st.equal( - // stringify( - // { a: ',', b: '', c: 'c,d%' }, - // { encode: true, encodeValuesOnly: true, arrayFormat: 'indices' }, - // ), - // 'a=%2C&b=&c=c%2Cd%25', - // ); - // st.equal( - // stringify( - // { a: ',', b: '', c: 'c,d%' }, - // { encode: true, encodeValuesOnly: true, arrayFormat: 'brackets' }, - // ), - // 'a=%2C&b=&c=c%2Cd%25', - // ); - // st.equal( - // stringify( - // { a: ',', b: '', c: 'c,d%' }, - // { encode: true, encodeValuesOnly: true, arrayFormat: 'comma' }, - // ), - // 'a=%2C&b=&c=c%2Cd%25', - // ); - // st.equal( - // stringify( - // { a: ',', b: '', c: 'c,d%' }, - // { encode: true, encodeValuesOnly: true, arrayFormat: 'repeat' }, - // ), - // 'a=%2C&b=&c=c%2Cd%25', - // ); - expect( - stringify( - { a: ',', b: '', c: 'c,d%' }, - { encode: true, encodeValuesOnly: true, arrayFormat: 'indices' }, - ), - ).toBe('a=%2C&b=&c=c%2Cd%25'); - expect( - stringify( - { a: ',', b: '', c: 'c,d%' }, - { encode: true, encodeValuesOnly: true, arrayFormat: 'brackets' }, - ), - ).toBe('a=%2C&b=&c=c%2Cd%25'); - expect( - stringify({ a: ',', b: '', c: 'c,d%' }, { encode: true, encodeValuesOnly: true, arrayFormat: 'comma' }), - ).toBe('a=%2C&b=&c=c%2Cd%25'); - expect( - stringify( - { a: ',', b: '', c: 'c,d%' }, - { encode: true, encodeValuesOnly: true, arrayFormat: 'repeat' }, - ), - ).toBe('a=%2C&b=&c=c%2Cd%25'); - - // st.equal( - // stringify( - // { a: ',', b: '', c: 'c,d%' }, - // { encode: true, encodeValuesOnly: false, arrayFormat: 'indices' }, - // ), - // 'a=%2C&b=&c=c%2Cd%25', - // ); - // st.equal( - // stringify( - // { a: ',', b: '', c: 'c,d%' }, - // { encode: true, encodeValuesOnly: false, arrayFormat: 'brackets' }, - // ), - // 'a=%2C&b=&c=c%2Cd%25', - // ); - // st.equal( - // stringify( - // { a: ',', b: '', c: 'c,d%' }, - // { encode: true, encodeValuesOnly: false, arrayFormat: 'comma' }, - // ), - // 'a=%2C&b=&c=c%2Cd%25', - // ); - // st.equal( - // stringify( - // { a: ',', b: '', c: 'c,d%' }, - // { encode: true, encodeValuesOnly: false, arrayFormat: 'repeat' }, - // ), - // 'a=%2C&b=&c=c%2Cd%25', - // ); - expect( - stringify( - { 
a: ',', b: '', c: 'c,d%' }, - { encode: true, encodeValuesOnly: false, arrayFormat: 'indices' }, - ), - ).toBe('a=%2C&b=&c=c%2Cd%25'); - expect( - stringify( - { a: ',', b: '', c: 'c,d%' }, - { encode: true, encodeValuesOnly: false, arrayFormat: 'brackets' }, - ), - ).toBe('a=%2C&b=&c=c%2Cd%25'); - expect( - stringify( - { a: ',', b: '', c: 'c,d%' }, - { encode: true, encodeValuesOnly: false, arrayFormat: 'comma' }, - ), - ).toBe('a=%2C&b=&c=c%2Cd%25'); - expect( - stringify( - { a: ',', b: '', c: 'c,d%' }, - { encode: true, encodeValuesOnly: false, arrayFormat: 'repeat' }, - ), - ).toBe('a=%2C&b=&c=c%2Cd%25'); - }); - - test('stringifies a nested array value with dots notation', function () { - // st.equal( - // stringify( - // { a: { b: ['c', 'd'] } }, - // { allowDots: true, encodeValuesOnly: true, arrayFormat: 'indices' }, - // ), - // 'a.b[0]=c&a.b[1]=d', - // 'indices: stringifies with dots + indices', - // ); - // st.equal( - // stringify( - // { a: { b: ['c', 'd'] } }, - // { allowDots: true, encodeValuesOnly: true, arrayFormat: 'brackets' }, - // ), - // 'a.b[]=c&a.b[]=d', - // 'brackets: stringifies with dots + brackets', - // ); - // st.equal( - // stringify( - // { a: { b: ['c', 'd'] } }, - // { allowDots: true, encodeValuesOnly: true, arrayFormat: 'comma' }, - // ), - // 'a.b=c,d', - // 'comma: stringifies with dots + comma', - // ); - // st.equal( - // stringify({ a: { b: ['c', 'd'] } }, { allowDots: true, encodeValuesOnly: true }), - // 'a.b[0]=c&a.b[1]=d', - // 'default: stringifies with dots + indices', - // ); - expect( - stringify( - { a: { b: ['c', 'd'] } }, - { allowDots: true, encodeValuesOnly: true, arrayFormat: 'indices' }, - ), - ).toBe('a.b[0]=c&a.b[1]=d'); - expect( - stringify( - { a: { b: ['c', 'd'] } }, - { allowDots: true, encodeValuesOnly: true, arrayFormat: 'brackets' }, - ), - ).toBe('a.b[]=c&a.b[]=d'); - expect( - stringify({ a: { b: ['c', 'd'] } }, { allowDots: true, encodeValuesOnly: true, arrayFormat: 'comma' }), - ).toBe('a.b=c,d'); - expect(stringify({ a: { b: ['c', 'd'] } }, { allowDots: true, encodeValuesOnly: true })).toBe( - 'a.b[0]=c&a.b[1]=d', - ); - }); - - test('stringifies an object inside an array', function () { - // st.equal( - // stringify({ a: [{ b: 'c' }] }, { arrayFormat: 'indices', encodeValuesOnly: true }), - // 'a[0][b]=c', - // 'indices => indices', - // ); - // st.equal( - // stringify({ a: [{ b: 'c' }] }, { arrayFormat: 'repeat', encodeValuesOnly: true }), - // 'a[b]=c', - // 'repeat => repeat', - // ); - // st.equal( - // stringify({ a: [{ b: 'c' }] }, { arrayFormat: 'brackets', encodeValuesOnly: true }), - // 'a[][b]=c', - // 'brackets => brackets', - // ); - // st.equal( - // stringify({ a: [{ b: 'c' }] }, { encodeValuesOnly: true }), - // 'a[0][b]=c', - // 'default => indices', - // ); - expect(stringify({ a: [{ b: 'c' }] }, { arrayFormat: 'indices', encodeValuesOnly: true })).toBe( - 'a[0][b]=c', - ); - expect(stringify({ a: [{ b: 'c' }] }, { arrayFormat: 'repeat', encodeValuesOnly: true })).toBe('a[b]=c'); - expect(stringify({ a: [{ b: 'c' }] }, { arrayFormat: 'brackets', encodeValuesOnly: true })).toBe( - 'a[][b]=c', - ); - expect(stringify({ a: [{ b: 'c' }] }, { encodeValuesOnly: true })).toBe('a[0][b]=c'); - - // st.equal( - // stringify({ a: [{ b: { c: [1] } }] }, { arrayFormat: 'indices', encodeValuesOnly: true }), - // 'a[0][b][c][0]=1', - // 'indices => indices', - // ); - // st.equal( - // stringify({ a: [{ b: { c: [1] } }] }, { arrayFormat: 'repeat', encodeValuesOnly: true }), - // 'a[b][c]=1', - // 'repeat => 
repeat', - // ); - // st.equal( - // stringify({ a: [{ b: { c: [1] } }] }, { arrayFormat: 'brackets', encodeValuesOnly: true }), - // 'a[][b][c][]=1', - // 'brackets => brackets', - // ); - // st.equal( - // stringify({ a: [{ b: { c: [1] } }] }, { encodeValuesOnly: true }), - // 'a[0][b][c][0]=1', - // 'default => indices', - // ); - expect(stringify({ a: [{ b: { c: [1] } }] }, { arrayFormat: 'indices', encodeValuesOnly: true })).toBe( - 'a[0][b][c][0]=1', - ); - expect(stringify({ a: [{ b: { c: [1] } }] }, { arrayFormat: 'repeat', encodeValuesOnly: true })).toBe( - 'a[b][c]=1', - ); - expect(stringify({ a: [{ b: { c: [1] } }] }, { arrayFormat: 'brackets', encodeValuesOnly: true })).toBe( - 'a[][b][c][]=1', - ); - expect(stringify({ a: [{ b: { c: [1] } }] }, { encodeValuesOnly: true })).toBe('a[0][b][c][0]=1'); - }); - - test('stringifies an array with mixed objects and primitives', function () { - // st.equal( - // stringify({ a: [{ b: 1 }, 2, 3] }, { encodeValuesOnly: true, arrayFormat: 'indices' }), - // 'a[0][b]=1&a[1]=2&a[2]=3', - // 'indices => indices', - // ); - // st.equal( - // stringify({ a: [{ b: 1 }, 2, 3] }, { encodeValuesOnly: true, arrayFormat: 'brackets' }), - // 'a[][b]=1&a[]=2&a[]=3', - // 'brackets => brackets', - // ); - // st.equal( - // stringify({ a: [{ b: 1 }, 2, 3] }, { encodeValuesOnly: true, arrayFormat: 'comma' }), - // '???', - // 'brackets => brackets', - // { skip: 'TODO: figure out what this should do' }, - // ); - // st.equal( - // stringify({ a: [{ b: 1 }, 2, 3] }, { encodeValuesOnly: true }), - // 'a[0][b]=1&a[1]=2&a[2]=3', - // 'default => indices', - // ); - expect(stringify({ a: [{ b: 1 }, 2, 3] }, { encodeValuesOnly: true, arrayFormat: 'indices' })).toBe( - 'a[0][b]=1&a[1]=2&a[2]=3', - ); - expect(stringify({ a: [{ b: 1 }, 2, 3] }, { encodeValuesOnly: true, arrayFormat: 'brackets' })).toBe( - 'a[][b]=1&a[]=2&a[]=3', - ); - // !Skipped: Figure out what this should do - // expect( - // stringify({ a: [{ b: 1 }, 2, 3] }, { encodeValuesOnly: true, arrayFormat: 'comma' }), - // ).toBe('???'); - expect(stringify({ a: [{ b: 1 }, 2, 3] }, { encodeValuesOnly: true })).toBe('a[0][b]=1&a[1]=2&a[2]=3'); - }); - - test('stringifies an object inside an array with dots notation', function () { - // st.equal( - // stringify({ a: [{ b: 'c' }] }, { allowDots: true, encode: false, arrayFormat: 'indices' }), - // 'a[0].b=c', - // 'indices => indices', - // ); - // st.equal( - // stringify( - // { a: [{ b: 'c' }] }, - // { allowDots: true, encode: false, arrayFormat: 'brackets' }, - // ), - // 'a[].b=c', - // 'brackets => brackets', - // ); - // st.equal( - // stringify({ a: [{ b: 'c' }] }, { allowDots: true, encode: false }), - // 'a[0].b=c', - // 'default => indices', - // ); - expect(stringify({ a: [{ b: 'c' }] }, { allowDots: true, encode: false, arrayFormat: 'indices' })).toBe( - 'a[0].b=c', - ); - expect(stringify({ a: [{ b: 'c' }] }, { allowDots: true, encode: false, arrayFormat: 'brackets' })).toBe( - 'a[].b=c', - ); - expect(stringify({ a: [{ b: 'c' }] }, { allowDots: true, encode: false })).toBe('a[0].b=c'); - - // st.equal( - // stringify( - // { a: [{ b: { c: [1] } }] }, - // { allowDots: true, encode: false, arrayFormat: 'indices' }, - // ), - // 'a[0].b.c[0]=1', - // 'indices => indices', - // ); - // st.equal( - // stringify( - // { a: [{ b: { c: [1] } }] }, - // { allowDots: true, encode: false, arrayFormat: 'brackets' }, - // ), - // 'a[].b.c[]=1', - // 'brackets => brackets', - // ); - // st.equal( - // stringify({ a: [{ b: { c: [1] } }] }, { allowDots: 
true, encode: false }), - // 'a[0].b.c[0]=1', - // 'default => indices', - // ); - expect( - stringify({ a: [{ b: { c: [1] } }] }, { allowDots: true, encode: false, arrayFormat: 'indices' }), - ).toBe('a[0].b.c[0]=1'); - expect( - stringify({ a: [{ b: { c: [1] } }] }, { allowDots: true, encode: false, arrayFormat: 'brackets' }), - ).toBe('a[].b.c[]=1'); - expect(stringify({ a: [{ b: { c: [1] } }] }, { allowDots: true, encode: false })).toBe('a[0].b.c[0]=1'); - }); - - test('does not omit object keys when indices = false', function () { - // st.equal(stringify({ a: [{ b: 'c' }] }, { indices: false }), 'a%5Bb%5D=c'); - expect(stringify({ a: [{ b: 'c' }] }, { indices: false })).toBe('a%5Bb%5D=c'); - }); - - test('uses indices notation for arrays when indices=true', function () { - // st.equal(stringify({ a: ['b', 'c'] }, { indices: true }), 'a%5B0%5D=b&a%5B1%5D=c'); - expect(stringify({ a: ['b', 'c'] }, { indices: true })).toBe('a%5B0%5D=b&a%5B1%5D=c'); - }); - - test('uses indices notation for arrays when no arrayFormat is specified', function () { - // st.equal(stringify({ a: ['b', 'c'] }), 'a%5B0%5D=b&a%5B1%5D=c'); - expect(stringify({ a: ['b', 'c'] })).toBe('a%5B0%5D=b&a%5B1%5D=c'); - }); - - test('uses indices notation for arrays when arrayFormat=indices', function () { - // st.equal(stringify({ a: ['b', 'c'] }, { arrayFormat: 'indices' }), 'a%5B0%5D=b&a%5B1%5D=c'); - expect(stringify({ a: ['b', 'c'] }, { arrayFormat: 'indices' })).toBe('a%5B0%5D=b&a%5B1%5D=c'); - }); - - test('uses repeat notation for arrays when arrayFormat=repeat', function () { - // st.equal(stringify({ a: ['b', 'c'] }, { arrayFormat: 'repeat' }), 'a=b&a=c'); - expect(stringify({ a: ['b', 'c'] }, { arrayFormat: 'repeat' })).toBe('a=b&a=c'); - }); - - test('uses brackets notation for arrays when arrayFormat=brackets', function () { - // st.equal(stringify({ a: ['b', 'c'] }, { arrayFormat: 'brackets' }), 'a%5B%5D=b&a%5B%5D=c'); - expect(stringify({ a: ['b', 'c'] }, { arrayFormat: 'brackets' })).toBe('a%5B%5D=b&a%5B%5D=c'); - }); - - test('stringifies a complicated object', function () { - // st.equal(stringify({ a: { b: 'c', d: 'e' } }), 'a%5Bb%5D=c&a%5Bd%5D=e'); - expect(stringify({ a: { b: 'c', d: 'e' } })).toBe('a%5Bb%5D=c&a%5Bd%5D=e'); - }); - - test('stringifies an empty value', function () { - // st.equal(stringify({ a: '' }), 'a='); - // st.equal(stringify({ a: null }, { strictNullHandling: true }), 'a'); - expect(stringify({ a: '' })).toBe('a='); - expect(stringify({ a: null }, { strictNullHandling: true })).toBe('a'); - - // st.equal(stringify({ a: '', b: '' }), 'a=&b='); - // st.equal(stringify({ a: null, b: '' }, { strictNullHandling: true }), 'a&b='); - expect(stringify({ a: '', b: '' })).toBe('a=&b='); - expect(stringify({ a: null, b: '' }, { strictNullHandling: true })).toBe('a&b='); - - // st.equal(stringify({ a: { b: '' } }), 'a%5Bb%5D='); - // st.equal(stringify({ a: { b: null } }, { strictNullHandling: true }), 'a%5Bb%5D'); - // st.equal(stringify({ a: { b: null } }, { strictNullHandling: false }), 'a%5Bb%5D='); - expect(stringify({ a: { b: '' } })).toBe('a%5Bb%5D='); - expect(stringify({ a: { b: null } }, { strictNullHandling: true })).toBe('a%5Bb%5D'); - expect(stringify({ a: { b: null } }, { strictNullHandling: false })).toBe('a%5Bb%5D='); - }); - - test('stringifies an empty array in different arrayFormat', function () { - // st.equal(stringify({ a: [], b: [null], c: 'c' }, { encode: false }), 'b[0]=&c=c'); - expect(stringify({ a: [], b: [null], c: 'c' }, { encode: false })).toBe('b[0]=&c=c'); - 
// arrayFormat default - // st.equal( - // stringify({ a: [], b: [null], c: 'c' }, { encode: false, arrayFormat: 'indices' }), - // 'b[0]=&c=c', - // ); - // st.equal( - // stringify({ a: [], b: [null], c: 'c' }, { encode: false, arrayFormat: 'brackets' }), - // 'b[]=&c=c', - // ); - // st.equal( - // stringify({ a: [], b: [null], c: 'c' }, { encode: false, arrayFormat: 'repeat' }), - // 'b=&c=c', - // ); - // st.equal( - // stringify({ a: [], b: [null], c: 'c' }, { encode: false, arrayFormat: 'comma' }), - // 'b=&c=c', - // ); - // st.equal( - // stringify( - // { a: [], b: [null], c: 'c' }, - // { encode: false, arrayFormat: 'comma', commaRoundTrip: true }, - // ), - // 'b[]=&c=c', - // ); - expect(stringify({ a: [], b: [null], c: 'c' }, { encode: false, arrayFormat: 'indices' })).toBe( - 'b[0]=&c=c', - ); - expect(stringify({ a: [], b: [null], c: 'c' }, { encode: false, arrayFormat: 'brackets' })).toBe( - 'b[]=&c=c', - ); - expect(stringify({ a: [], b: [null], c: 'c' }, { encode: false, arrayFormat: 'repeat' })).toBe('b=&c=c'); - expect(stringify({ a: [], b: [null], c: 'c' }, { encode: false, arrayFormat: 'comma' })).toBe('b=&c=c'); - expect( - stringify({ a: [], b: [null], c: 'c' }, { encode: false, arrayFormat: 'comma', commaRoundTrip: true }), - ).toBe('b[]=&c=c'); - - // with strictNullHandling - // st.equal( - // stringify( - // { a: [], b: [null], c: 'c' }, - // { encode: false, arrayFormat: 'indices', strictNullHandling: true }, - // ), - // 'b[0]&c=c', - // ); - // st.equal( - // stringify( - // { a: [], b: [null], c: 'c' }, - // { encode: false, arrayFormat: 'brackets', strictNullHandling: true }, - // ), - // 'b[]&c=c', - // ); - // st.equal( - // stringify( - // { a: [], b: [null], c: 'c' }, - // { encode: false, arrayFormat: 'repeat', strictNullHandling: true }, - // ), - // 'b&c=c', - // ); - // st.equal( - // stringify( - // { a: [], b: [null], c: 'c' }, - // { encode: false, arrayFormat: 'comma', strictNullHandling: true }, - // ), - // 'b&c=c', - // ); - // st.equal( - // stringify( - // { a: [], b: [null], c: 'c' }, - // { encode: false, arrayFormat: 'comma', strictNullHandling: true, commaRoundTrip: true }, - // ), - // 'b[]&c=c', - // ); - - expect( - stringify( - { a: [], b: [null], c: 'c' }, - { encode: false, arrayFormat: 'indices', strictNullHandling: true }, - ), - ).toBe('b[0]&c=c'); - expect( - stringify( - { a: [], b: [null], c: 'c' }, - { encode: false, arrayFormat: 'brackets', strictNullHandling: true }, - ), - ).toBe('b[]&c=c'); - expect( - stringify( - { a: [], b: [null], c: 'c' }, - { encode: false, arrayFormat: 'repeat', strictNullHandling: true }, - ), - ).toBe('b&c=c'); - expect( - stringify( - { a: [], b: [null], c: 'c' }, - { encode: false, arrayFormat: 'comma', strictNullHandling: true }, - ), - ).toBe('b&c=c'); - expect( - stringify( - { a: [], b: [null], c: 'c' }, - { encode: false, arrayFormat: 'comma', strictNullHandling: true, commaRoundTrip: true }, - ), - ).toBe('b[]&c=c'); - - // with skipNulls - // st.equal( - // stringify( - // { a: [], b: [null], c: 'c' }, - // { encode: false, arrayFormat: 'indices', skipNulls: true }, - // ), - // 'c=c', - // ); - // st.equal( - // stringify( - // { a: [], b: [null], c: 'c' }, - // { encode: false, arrayFormat: 'brackets', skipNulls: true }, - // ), - // 'c=c', - // ); - // st.equal( - // stringify( - // { a: [], b: [null], c: 'c' }, - // { encode: false, arrayFormat: 'repeat', skipNulls: true }, - // ), - // 'c=c', - // ); - // st.equal( - // stringify( - // { a: [], b: [null], c: 'c' }, - // { encode: 
false, arrayFormat: 'comma', skipNulls: true }, - // ), - // 'c=c', - // ); - expect( - stringify({ a: [], b: [null], c: 'c' }, { encode: false, arrayFormat: 'indices', skipNulls: true }), - ).toBe('c=c'); - expect( - stringify({ a: [], b: [null], c: 'c' }, { encode: false, arrayFormat: 'brackets', skipNulls: true }), - ).toBe('c=c'); - expect( - stringify({ a: [], b: [null], c: 'c' }, { encode: false, arrayFormat: 'repeat', skipNulls: true }), - ).toBe('c=c'); - expect( - stringify({ a: [], b: [null], c: 'c' }, { encode: false, arrayFormat: 'comma', skipNulls: true }), - ).toBe('c=c'); - }); - - test('stringifies a null object', function () { - var obj = Object.create(null); - obj.a = 'b'; - // st.equal(stringify(obj), 'a=b'); - expect(stringify(obj)).toBe('a=b'); - }); - - test('returns an empty string for invalid input', function () { - // st.equal(stringify(undefined), ''); - // st.equal(stringify(false), ''); - // st.equal(stringify(null), ''); - // st.equal(stringify(''), ''); - expect(stringify(undefined)).toBe(''); - expect(stringify(false)).toBe(''); - expect(stringify(null)).toBe(''); - expect(stringify('')).toBe(''); - }); - - test('stringifies an object with a null object as a child', function () { - var obj = { a: Object.create(null) }; - - obj.a.b = 'c'; - // st.equal(stringify(obj), 'a%5Bb%5D=c'); - expect(stringify(obj)).toBe('a%5Bb%5D=c'); - }); - - test('drops keys with a value of undefined', function () { - // st.equal(stringify({ a: undefined }), ''); - expect(stringify({ a: undefined })).toBe(''); - - // st.equal( - // stringify({ a: { b: undefined, c: null } }, { strictNullHandling: true }), - // 'a%5Bc%5D', - // ); - // st.equal( - // stringify({ a: { b: undefined, c: null } }, { strictNullHandling: false }), - // 'a%5Bc%5D=', - // ); - // st.equal(stringify({ a: { b: undefined, c: '' } }), 'a%5Bc%5D='); - expect(stringify({ a: { b: undefined, c: null } }, { strictNullHandling: true })).toBe('a%5Bc%5D'); - expect(stringify({ a: { b: undefined, c: null } }, { strictNullHandling: false })).toBe('a%5Bc%5D='); - expect(stringify({ a: { b: undefined, c: '' } })).toBe('a%5Bc%5D='); - }); - - test('url encodes values', function () { - // st.equal(stringify({ a: 'b c' }), 'a=b%20c'); - expect(stringify({ a: 'b c' })).toBe('a=b%20c'); - }); - - test('stringifies a date', function () { - var now = new Date(); - var str = 'a=' + encodeURIComponent(now.toISOString()); - // st.equal(stringify({ a: now }), str); - expect(stringify({ a: now })).toBe(str); - }); - - test('stringifies the weird object from qs', function () { - // st.equal( - // stringify({ 'my weird field': '~q1!2"\'w$5&7/z8)?' }), - // 'my%20weird%20field=~q1%212%22%27w%245%267%2Fz8%29%3F', - // ); - expect(stringify({ 'my weird field': '~q1!2"\'w$5&7/z8)?' 
})).toBe( - 'my%20weird%20field=~q1%212%22%27w%245%267%2Fz8%29%3F', - ); - }); - - // TODO: Investigate how to to intercept in vitest - // TODO(rob) - test('skips properties that are part of the object prototype', function () { - // st.intercept(Object.prototype, 'crash', { value: 'test' }); - // @ts-expect-error - Object.prototype.crash = 'test'; - - // st.equal(stringify({ a: 'b' }), 'a=b'); - // st.equal(stringify({ a: { b: 'c' } }), 'a%5Bb%5D=c'); - expect(stringify({ a: 'b' })).toBe('a=b'); - expect(stringify({ a: { b: 'c' } })).toBe('a%5Bb%5D=c'); - }); - - test('stringifies boolean values', function () { - // st.equal(stringify({ a: true }), 'a=true'); - // st.equal(stringify({ a: { b: true } }), 'a%5Bb%5D=true'); - // st.equal(stringify({ b: false }), 'b=false'); - // st.equal(stringify({ b: { c: false } }), 'b%5Bc%5D=false'); - expect(stringify({ a: true })).toBe('a=true'); - expect(stringify({ a: { b: true } })).toBe('a%5Bb%5D=true'); - expect(stringify({ b: false })).toBe('b=false'); - expect(stringify({ b: { c: false } })).toBe('b%5Bc%5D=false'); - }); - - test('stringifies buffer values', function () { - // st.equal(stringify({ a: Buffer.from('test') }), 'a=test'); - // st.equal(stringify({ a: { b: Buffer.from('test') } }), 'a%5Bb%5D=test'); - }); - - test('stringifies an object using an alternative delimiter', function () { - // st.equal(stringify({ a: 'b', c: 'd' }, { delimiter: ';' }), 'a=b;c=d'); - expect(stringify({ a: 'b', c: 'd' }, { delimiter: ';' })).toBe('a=b;c=d'); - }); - - // We dont target environments which dont even have Buffer - // test('does not blow up when Buffer global is missing', function () { - // var restore = mockProperty(global, 'Buffer', { delete: true }); - - // var result = stringify({ a: 'b', c: 'd' }); - - // restore(); - - // st.equal(result, 'a=b&c=d'); - // st.end(); - // }); - - test('does not crash when parsing circular references', function () { - var a: any = {}; - a.b = a; - - // st['throws']( - // function () { - // stringify({ 'foo[bar]': 'baz', 'foo[baz]': a }); - // }, - // /RangeError: Cyclic object value/, - // 'cyclic values throw', - // ); - expect(() => { - stringify({ 'foo[bar]': 'baz', 'foo[baz]': a }); - }).toThrow('Cyclic object value'); - - var circular: any = { - a: 'value', - }; - circular.a = circular; - // st['throws']( - // function () { - // stringify(circular); - // }, - // /RangeError: Cyclic object value/, - // 'cyclic values throw', - // ); - expect(() => { - stringify(circular); - }).toThrow('Cyclic object value'); - - var arr = ['a']; - // st.doesNotThrow(function () { - // stringify({ x: arr, y: arr }); - // }, 'non-cyclic values do not throw'); - expect(() => { - stringify({ x: arr, y: arr }); - }).not.toThrow(); - }); - - test('non-circular duplicated references can still work', function () { - var hourOfDay = { - function: 'hour_of_day', - }; - - var p1 = { - function: 'gte', - arguments: [hourOfDay, 0], - }; - var p2 = { - function: 'lte', - arguments: [hourOfDay, 23], - }; - - // st.equal( - // stringify( - // { filters: { $and: [p1, p2] } }, - // { encodeValuesOnly: true, arrayFormat: 'indices' }, - // ), - // 'filters[$and][0][function]=gte&filters[$and][0][arguments][0][function]=hour_of_day&filters[$and][0][arguments][1]=0&filters[$and][1][function]=lte&filters[$and][1][arguments][0][function]=hour_of_day&filters[$and][1][arguments][1]=23', - // ); - // st.equal( - // stringify( - // { filters: { $and: [p1, p2] } }, - // { encodeValuesOnly: true, arrayFormat: 'brackets' }, - // ), - // 
'filters[$and][][function]=gte&filters[$and][][arguments][][function]=hour_of_day&filters[$and][][arguments][]=0&filters[$and][][function]=lte&filters[$and][][arguments][][function]=hour_of_day&filters[$and][][arguments][]=23', - // ); - // st.equal( - // stringify( - // { filters: { $and: [p1, p2] } }, - // { encodeValuesOnly: true, arrayFormat: 'repeat' }, - // ), - // 'filters[$and][function]=gte&filters[$and][arguments][function]=hour_of_day&filters[$and][arguments]=0&filters[$and][function]=lte&filters[$and][arguments][function]=hour_of_day&filters[$and][arguments]=23', - // ); - expect( - stringify({ filters: { $and: [p1, p2] } }, { encodeValuesOnly: true, arrayFormat: 'indices' }), - ).toBe( - 'filters[$and][0][function]=gte&filters[$and][0][arguments][0][function]=hour_of_day&filters[$and][0][arguments][1]=0&filters[$and][1][function]=lte&filters[$and][1][arguments][0][function]=hour_of_day&filters[$and][1][arguments][1]=23', - ); - expect( - stringify({ filters: { $and: [p1, p2] } }, { encodeValuesOnly: true, arrayFormat: 'brackets' }), - ).toBe( - 'filters[$and][][function]=gte&filters[$and][][arguments][][function]=hour_of_day&filters[$and][][arguments][]=0&filters[$and][][function]=lte&filters[$and][][arguments][][function]=hour_of_day&filters[$and][][arguments][]=23', - ); - expect( - stringify({ filters: { $and: [p1, p2] } }, { encodeValuesOnly: true, arrayFormat: 'repeat' }), - ).toBe( - 'filters[$and][function]=gte&filters[$and][arguments][function]=hour_of_day&filters[$and][arguments]=0&filters[$and][function]=lte&filters[$and][arguments][function]=hour_of_day&filters[$and][arguments]=23', - ); - }); - - test('selects properties when filter=array', function () { - // st.equal(stringify({ a: 'b' }, { filter: ['a'] }), 'a=b'); - // st.equal(stringify({ a: 1 }, { filter: [] }), ''); - expect(stringify({ a: 'b' }, { filter: ['a'] })).toBe('a=b'); - expect(stringify({ a: 1 }, { filter: [] })).toBe(''); - - // st.equal( - // stringify( - // { a: { b: [1, 2, 3, 4], c: 'd' }, c: 'f' }, - // { filter: ['a', 'b', 0, 2], arrayFormat: 'indices' }, - // ), - // 'a%5Bb%5D%5B0%5D=1&a%5Bb%5D%5B2%5D=3', - // 'indices => indices', - // ); - // st.equal( - // stringify( - // { a: { b: [1, 2, 3, 4], c: 'd' }, c: 'f' }, - // { filter: ['a', 'b', 0, 2], arrayFormat: 'brackets' }, - // ), - // 'a%5Bb%5D%5B%5D=1&a%5Bb%5D%5B%5D=3', - // 'brackets => brackets', - // ); - // st.equal( - // stringify({ a: { b: [1, 2, 3, 4], c: 'd' }, c: 'f' }, { filter: ['a', 'b', 0, 2] }), - // 'a%5Bb%5D%5B0%5D=1&a%5Bb%5D%5B2%5D=3', - // 'default => indices', - // ); - expect(stringify({ a: { b: [1, 2, 3, 4], c: 'd' }, c: 'f' }, { filter: ['a', 'b', 0, 2] })).toBe( - 'a%5Bb%5D%5B0%5D=1&a%5Bb%5D%5B2%5D=3', - ); - expect( - stringify( - { a: { b: [1, 2, 3, 4], c: 'd' }, c: 'f' }, - { filter: ['a', 'b', 0, 2], arrayFormat: 'indices' }, - ), - ).toBe('a%5Bb%5D%5B0%5D=1&a%5Bb%5D%5B2%5D=3'); - expect( - stringify( - { a: { b: [1, 2, 3, 4], c: 'd' }, c: 'f' }, - { filter: ['a', 'b', 0, 2], arrayFormat: 'brackets' }, - ), - ).toBe('a%5Bb%5D%5B%5D=1&a%5Bb%5D%5B%5D=3'); - }); - - test('supports custom representations when filter=function', function () { - var calls = 0; - var obj = { a: 'b', c: 'd', e: { f: new Date(1257894000000) } }; - var filterFunc: StringifyOptions['filter'] = function (prefix, value) { - calls += 1; - if (calls === 1) { - // st.equal(prefix, '', 'prefix is empty'); - // st.equal(value, obj); - expect(prefix).toBe(''); - expect(value).toBe(obj); - } else if (prefix === 'c') { - return void 0; - } else if 
(value instanceof Date) { - // st.equal(prefix, 'e[f]'); - expect(prefix).toBe('e[f]'); - return value.getTime(); - } - return value; - }; - - // st.equal(stringify(obj, { filter: filterFunc }), 'a=b&e%5Bf%5D=1257894000000'); - // st.equal(calls, 5); - expect(stringify(obj, { filter: filterFunc })).toBe('a=b&e%5Bf%5D=1257894000000'); - expect(calls).toBe(5); - }); - - test('can disable uri encoding', function () { - // st.equal(stringify({ a: 'b' }, { encode: false }), 'a=b'); - // st.equal(stringify({ a: { b: 'c' } }, { encode: false }), 'a[b]=c'); - // st.equal( - // stringify({ a: 'b', c: null }, { strictNullHandling: true, encode: false }), - // 'a=b&c', - // ); - expect(stringify({ a: 'b' }, { encode: false })).toBe('a=b'); - expect(stringify({ a: { b: 'c' } }, { encode: false })).toBe('a[b]=c'); - expect(stringify({ a: 'b', c: null }, { strictNullHandling: true, encode: false })).toBe('a=b&c'); - }); - - test('can sort the keys', function () { - // @ts-expect-error - var sort: NonNullable = function (a: string, b: string) { - return a.localeCompare(b); - }; - // st.equal(stringify({ a: 'c', z: 'y', b: 'f' }, { sort: sort }), 'a=c&b=f&z=y'); - // st.equal( - // stringify({ a: 'c', z: { j: 'a', i: 'b' }, b: 'f' }, { sort: sort }), - // 'a=c&b=f&z%5Bi%5D=b&z%5Bj%5D=a', - // ); - expect(stringify({ a: 'c', z: 'y', b: 'f' }, { sort: sort })).toBe('a=c&b=f&z=y'); - expect(stringify({ a: 'c', z: { j: 'a', i: 'b' }, b: 'f' }, { sort: sort })).toBe( - 'a=c&b=f&z%5Bi%5D=b&z%5Bj%5D=a', - ); - }); - - test('can sort the keys at depth 3 or more too', function () { - // @ts-expect-error - var sort: NonNullable = function (a: string, b: string) { - return a.localeCompare(b); - }; - // st.equal( - // stringify( - // { a: 'a', z: { zj: { zjb: 'zjb', zja: 'zja' }, zi: { zib: 'zib', zia: 'zia' } }, b: 'b' }, - // { sort: sort, encode: false }, - // ), - // 'a=a&b=b&z[zi][zia]=zia&z[zi][zib]=zib&z[zj][zja]=zja&z[zj][zjb]=zjb', - // ); - // st.equal( - // stringify( - // { a: 'a', z: { zj: { zjb: 'zjb', zja: 'zja' }, zi: { zib: 'zib', zia: 'zia' } }, b: 'b' }, - // { sort: null, encode: false }, - // ), - // 'a=a&z[zj][zjb]=zjb&z[zj][zja]=zja&z[zi][zib]=zib&z[zi][zia]=zia&b=b', - // ); - expect( - stringify( - { a: 'a', z: { zj: { zjb: 'zjb', zja: 'zja' }, zi: { zib: 'zib', zia: 'zia' } }, b: 'b' }, - { sort: sort, encode: false }, - ), - ).toBe('a=a&b=b&z[zi][zia]=zia&z[zi][zib]=zib&z[zj][zja]=zja&z[zj][zjb]=zjb'); - expect( - stringify( - { a: 'a', z: { zj: { zjb: 'zjb', zja: 'zja' }, zi: { zib: 'zib', zia: 'zia' } }, b: 'b' }, - { sort: null, encode: false }, - ), - ).toBe('a=a&z[zj][zjb]=zjb&z[zj][zja]=zja&z[zi][zib]=zib&z[zi][zia]=zia&b=b'); - }); - - test('can stringify with custom encoding', function () { - // st.equal( - // stringify( - // { 県: '大阪府', '': '' }, - // { - // encoder: function (str) { - // if (str.length === 0) { - // return ''; - // } - // var buf = iconv.encode(str, 'shiftjis'); - // var result = []; - // for (var i = 0; i < buf.length; ++i) { - // result.push(buf.readUInt8(i).toString(16)); - // } - // return '%' + result.join('%'); - // }, - // }, - // ), - // '%8c%a7=%91%e5%8d%e3%95%7b&=', - // ); - expect( - stringify( - { 県: '大阪府', '': '' }, - { - encoder: function (str) { - if (str.length === 0) { - return ''; - } - var buf = iconv.encode(str, 'shiftjis'); - var result = []; - for (var i = 0; i < buf.length; ++i) { - result.push(buf.readUInt8(i).toString(16)); - } - return '%' + result.join('%'); - }, - }, - ), - ).toBe('%8c%a7=%91%e5%8d%e3%95%7b&='); - }); - - 
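// Editor's note (annotation only, not part of the patch): the vitest port deleted above exercises the
// `encoder` hook of the vendored qs-style stringify. The sketch below shows the shape of that hook as
// these tests themselves use it; the import path 'brapi/internal/qs' is an assumption inferred from the
// companion utils test and may differ in the actual package layout.
import { stringify } from 'brapi/internal/qs';

// When `encoder` is supplied, it is called once per key and once per value, and whatever string it
// returns is spliced into the query verbatim, so it fully controls percent-encoding.
stringify({ a: 'b c' }, { encoder: (str: any) => String(str).toUpperCase() }); // => 'A=B C'

// Without an encoder, the default RFC 3986 percent-encoding is applied to both keys and values,
// as the 'url encodes values' test above asserts.
stringify({ a: 'b c' }); // => 'a=b%20c'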
test('receives the default encoder as a second argument', function () { - // stringify( - // { a: 1, b: new Date(), c: true, d: [1] }, - // { - // encoder: function (str) { - // st.match(typeof str, /^(?:string|number|boolean)$/); - // return ''; - // }, - // }, - // ); - - stringify( - { a: 1, b: new Date(), c: true, d: [1] }, - { - encoder: function (str) { - // st.match(typeof str, /^(?:string|number|boolean)$/); - assert.match(typeof str, /^(?:string|number|boolean)$/); - return ''; - }, - }, - ); - }); - - test('receives the default encoder as a second argument', function () { - // stringify( - // { a: 1 }, - // { - // encoder: function (str, defaultEncoder) { - // st.equal(defaultEncoder, utils.encode); - // }, - // }, - // ); - - stringify( - { a: 1 }, - { - // @ts-ignore - encoder: function (_str, defaultEncoder) { - expect(defaultEncoder).toBe(encode); - }, - }, - ); - }); - - test('throws error with wrong encoder', function () { - // st['throws'](function () { - // stringify({}, { encoder: 'string' }); - // }, new TypeError('Encoder has to be a function.')); - // st.end(); - expect(() => { - // @ts-expect-error - stringify({}, { encoder: 'string' }); - }).toThrow(TypeError); - }); - - (typeof Buffer === 'undefined' ? test.skip : test)( - 'can use custom encoder for a buffer object', - function () { - // st.equal( - // stringify( - // { a: Buffer.from([1]) }, - // { - // encoder: function (buffer) { - // if (typeof buffer === 'string') { - // return buffer; - // } - // return String.fromCharCode(buffer.readUInt8(0) + 97); - // }, - // }, - // ), - // 'a=b', - // ); - expect( - stringify( - { a: Buffer.from([1]) }, - { - encoder: function (buffer) { - if (typeof buffer === 'string') { - return buffer; - } - return String.fromCharCode(buffer.readUInt8(0) + 97); - }, - }, - ), - ).toBe('a=b'); - - // st.equal( - // stringify( - // { a: Buffer.from('a b') }, - // { - // encoder: function (buffer) { - // return buffer; - // }, - // }, - // ), - // 'a=a b', - // ); - expect( - stringify( - { a: Buffer.from('a b') }, - { - encoder: function (buffer) { - return buffer; - }, - }, - ), - ).toBe('a=a b'); - }, - ); - - test('serializeDate option', function () { - var date = new Date(); - // st.equal( - // stringify({ a: date }), - // 'a=' + date.toISOString().replace(/:/g, '%3A'), - // 'default is toISOString', - // ); - expect(stringify({ a: date })).toBe('a=' + date.toISOString().replace(/:/g, '%3A')); - - var mutatedDate = new Date(); - mutatedDate.toISOString = function () { - throw new SyntaxError(); - }; - // st['throws'](function () { - // mutatedDate.toISOString(); - // }, SyntaxError); - expect(() => { - mutatedDate.toISOString(); - }).toThrow(SyntaxError); - // st.equal( - // stringify({ a: mutatedDate }), - // 'a=' + Date.prototype.toISOString.call(mutatedDate).replace(/:/g, '%3A'), - // 'toISOString works even when method is not locally present', - // ); - expect(stringify({ a: mutatedDate })).toBe( - 'a=' + Date.prototype.toISOString.call(mutatedDate).replace(/:/g, '%3A'), - ); - - var specificDate = new Date(6); - // st.equal( - // stringify( - // { a: specificDate }, - // { - // serializeDate: function (d) { - // return d.getTime() * 7; - // }, - // }, - // ), - // 'a=42', - // 'custom serializeDate function called', - // ); - expect( - stringify( - { a: specificDate }, - { - // @ts-ignore - serializeDate: function (d) { - return d.getTime() * 7; - }, - }, - ), - ).toBe('a=42'); - - // st.equal( - // stringify( - // { a: [date] }, - // { - // serializeDate: function (d) { - // 
return d.getTime(); - // }, - // arrayFormat: 'comma', - // }, - // ), - // 'a=' + date.getTime(), - // 'works with arrayFormat comma', - // ); - // st.equal( - // stringify( - // { a: [date] }, - // { - // serializeDate: function (d) { - // return d.getTime(); - // }, - // arrayFormat: 'comma', - // commaRoundTrip: true, - // }, - // ), - // 'a%5B%5D=' + date.getTime(), - // 'works with arrayFormat comma', - // ); - expect( - stringify( - { a: [date] }, - { - // @ts-expect-error - serializeDate: function (d) { - return d.getTime(); - }, - arrayFormat: 'comma', - }, - ), - ).toBe('a=' + date.getTime()); - expect( - stringify( - { a: [date] }, - { - // @ts-expect-error - serializeDate: function (d) { - return d.getTime(); - }, - arrayFormat: 'comma', - commaRoundTrip: true, - }, - ), - ).toBe('a%5B%5D=' + date.getTime()); - }); - - test('RFC 1738 serialization', function () { - // st.equal(stringify({ a: 'b c' }, { format: formats.RFC1738 }), 'a=b+c'); - // st.equal(stringify({ 'a b': 'c d' }, { format: formats.RFC1738 }), 'a+b=c+d'); - // st.equal( - // stringify({ 'a b': Buffer.from('a b') }, { format: formats.RFC1738 }), - // 'a+b=a+b', - // ); - expect(stringify({ a: 'b c' }, { format: 'RFC1738' })).toBe('a=b+c'); - expect(stringify({ 'a b': 'c d' }, { format: 'RFC1738' })).toBe('a+b=c+d'); - expect(stringify({ 'a b': Buffer.from('a b') }, { format: 'RFC1738' })).toBe('a+b=a+b'); - - // st.equal(stringify({ 'foo(ref)': 'bar' }, { format: formats.RFC1738 }), 'foo(ref)=bar'); - expect(stringify({ 'foo(ref)': 'bar' }, { format: 'RFC1738' })).toBe('foo(ref)=bar'); - }); - - test('RFC 3986 spaces serialization', function () { - // st.equal(stringify({ a: 'b c' }, { format: formats.RFC3986 }), 'a=b%20c'); - // st.equal(stringify({ 'a b': 'c d' }, { format: formats.RFC3986 }), 'a%20b=c%20d'); - // st.equal( - // stringify({ 'a b': Buffer.from('a b') }, { format: formats.RFC3986 }), - // 'a%20b=a%20b', - // ); - expect(stringify({ a: 'b c' }, { format: 'RFC3986' })).toBe('a=b%20c'); - expect(stringify({ 'a b': 'c d' }, { format: 'RFC3986' })).toBe('a%20b=c%20d'); - expect(stringify({ 'a b': Buffer.from('a b') }, { format: 'RFC3986' })).toBe('a%20b=a%20b'); - }); - - test('Backward compatibility to RFC 3986', function () { - // st.equal(stringify({ a: 'b c' }), 'a=b%20c'); - // st.equal(stringify({ 'a b': Buffer.from('a b') }), 'a%20b=a%20b'); - expect(stringify({ a: 'b c' })).toBe('a=b%20c'); - expect(stringify({ 'a b': Buffer.from('a b') })).toBe('a%20b=a%20b'); - }); - - test('Edge cases and unknown formats', function () { - ['UFO1234', false, 1234, null, {}, []].forEach(function (format) { - // st['throws'](function () { - // stringify({ a: 'b c' }, { format: format }); - // }, new TypeError('Unknown format option provided.')); - expect(() => { - // @ts-expect-error - stringify({ a: 'b c' }, { format: format }); - }).toThrow(TypeError); - }); - }); - - test('encodeValuesOnly', function () { - // st.equal( - // stringify( - // { a: 'b', c: ['d', 'e=f'], f: [['g'], ['h']] }, - // { encodeValuesOnly: true, arrayFormat: 'indices' }, - // ), - // 'a=b&c[0]=d&c[1]=e%3Df&f[0][0]=g&f[1][0]=h', - // 'encodeValuesOnly + indices', - // ); - // st.equal( - // stringify( - // { a: 'b', c: ['d', 'e=f'], f: [['g'], ['h']] }, - // { encodeValuesOnly: true, arrayFormat: 'brackets' }, - // ), - // 'a=b&c[]=d&c[]=e%3Df&f[][]=g&f[][]=h', - // 'encodeValuesOnly + brackets', - // ); - // st.equal( - // stringify( - // { a: 'b', c: ['d', 'e=f'], f: [['g'], ['h']] }, - // { encodeValuesOnly: true, arrayFormat: 
'repeat' }, - // ), - // 'a=b&c=d&c=e%3Df&f=g&f=h', - // 'encodeValuesOnly + repeat', - // ); - expect( - stringify( - { a: 'b', c: ['d', 'e=f'], f: [['g'], ['h']] }, - { encodeValuesOnly: true, arrayFormat: 'indices' }, - ), - ).toBe('a=b&c[0]=d&c[1]=e%3Df&f[0][0]=g&f[1][0]=h'); - expect( - stringify( - { a: 'b', c: ['d', 'e=f'], f: [['g'], ['h']] }, - { encodeValuesOnly: true, arrayFormat: 'brackets' }, - ), - ).toBe('a=b&c[]=d&c[]=e%3Df&f[][]=g&f[][]=h'); - expect( - stringify( - { a: 'b', c: ['d', 'e=f'], f: [['g'], ['h']] }, - { encodeValuesOnly: true, arrayFormat: 'repeat' }, - ), - ).toBe('a=b&c=d&c=e%3Df&f=g&f=h'); - - // st.equal( - // stringify({ a: 'b', c: ['d', 'e'], f: [['g'], ['h']] }, { arrayFormat: 'indices' }), - // 'a=b&c%5B0%5D=d&c%5B1%5D=e&f%5B0%5D%5B0%5D=g&f%5B1%5D%5B0%5D=h', - // 'no encodeValuesOnly + indices', - // ); - // st.equal( - // stringify({ a: 'b', c: ['d', 'e'], f: [['g'], ['h']] }, { arrayFormat: 'brackets' }), - // 'a=b&c%5B%5D=d&c%5B%5D=e&f%5B%5D%5B%5D=g&f%5B%5D%5B%5D=h', - // 'no encodeValuesOnly + brackets', - // ); - // st.equal( - // stringify({ a: 'b', c: ['d', 'e'], f: [['g'], ['h']] }, { arrayFormat: 'repeat' }), - // 'a=b&c=d&c=e&f=g&f=h', - // 'no encodeValuesOnly + repeat', - // ); - expect(stringify({ a: 'b', c: ['d', 'e'], f: [['g'], ['h']] }, { arrayFormat: 'indices' })).toBe( - 'a=b&c%5B0%5D=d&c%5B1%5D=e&f%5B0%5D%5B0%5D=g&f%5B1%5D%5B0%5D=h', - ); - expect(stringify({ a: 'b', c: ['d', 'e'], f: [['g'], ['h']] }, { arrayFormat: 'brackets' })).toBe( - 'a=b&c%5B%5D=d&c%5B%5D=e&f%5B%5D%5B%5D=g&f%5B%5D%5B%5D=h', - ); - expect(stringify({ a: 'b', c: ['d', 'e'], f: [['g'], ['h']] }, { arrayFormat: 'repeat' })).toBe( - 'a=b&c=d&c=e&f=g&f=h', - ); - }); - - test('encodeValuesOnly - strictNullHandling', function () { - // st.equal( - // stringify({ a: { b: null } }, { encodeValuesOnly: true, strictNullHandling: true }), - // 'a[b]', - // ); - expect(stringify({ a: { b: null } }, { encodeValuesOnly: true, strictNullHandling: true })).toBe('a[b]'); - }); - - test('throws if an invalid charset is specified', function () { - // st['throws'](function () { - // stringify({ a: 'b' }, { charset: 'foobar' }); - // }, new TypeError('The charset option must be either utf-8, iso-8859-1, or undefined')); - expect(() => { - // @ts-expect-error - stringify({ a: 'b' }, { charset: 'foobar' }); - }).toThrow(TypeError); - }); - - test('respects a charset of iso-8859-1', function () { - // st.equal(stringify({ æ: 'æ' }, { charset: 'iso-8859-1' }), '%E6=%E6'); - expect(stringify({ æ: 'æ' }, { charset: 'iso-8859-1' })).toBe('%E6=%E6'); - }); - - test('encodes unrepresentable chars as numeric entities in iso-8859-1 mode', function () { - // st.equal(stringify({ a: '☺' }, { charset: 'iso-8859-1' }), 'a=%26%239786%3B'); - expect(stringify({ a: '☺' }, { charset: 'iso-8859-1' })).toBe('a=%26%239786%3B'); - }); - - test('respects an explicit charset of utf-8 (the default)', function () { - // st.equal(stringify({ a: 'æ' }, { charset: 'utf-8' }), 'a=%C3%A6'); - expect(stringify({ a: 'æ' }, { charset: 'utf-8' })).toBe('a=%C3%A6'); - }); - - test('`charsetSentinel` option', function () { - // st.equal( - // stringify({ a: 'æ' }, { charsetSentinel: true, charset: 'utf-8' }), - // 'utf8=%E2%9C%93&a=%C3%A6', - // 'adds the right sentinel when instructed to and the charset is utf-8', - // ); - expect(stringify({ a: 'æ' }, { charsetSentinel: true, charset: 'utf-8' })).toBe( - 'utf8=%E2%9C%93&a=%C3%A6', - ); - - // st.equal( - // stringify({ a: 'æ' }, { charsetSentinel: true, charset: 
'iso-8859-1' }), - // 'utf8=%26%2310003%3B&a=%E6', - // 'adds the right sentinel when instructed to and the charset is iso-8859-1', - // ); - expect(stringify({ a: 'æ' }, { charsetSentinel: true, charset: 'iso-8859-1' })).toBe( - 'utf8=%26%2310003%3B&a=%E6', - ); - }); - - test('does not mutate the options argument', function () { - var options = {}; - stringify({}, options); - // st.deepEqual(options, {}); - expect(options).toEqual({}); - }); - - test('strictNullHandling works with custom filter', function () { - // @ts-expect-error - var filter = function (_prefix, value) { - return value; - }; - - var options = { strictNullHandling: true, filter: filter }; - // st.equal(stringify({ key: null }, options), 'key'); - expect(stringify({ key: null }, options)).toBe('key'); - }); - - test('strictNullHandling works with null serializeDate', function () { - var serializeDate = function () { - return null; - }; - var options = { strictNullHandling: true, serializeDate: serializeDate }; - var date = new Date(); - // st.equal(stringify({ key: date }, options), 'key'); - // @ts-expect-error - expect(stringify({ key: date }, options)).toBe('key'); - }); - - test('allows for encoding keys and values differently', function () { - // @ts-expect-error - var encoder = function (str, defaultEncoder, charset, type) { - if (type === 'key') { - return defaultEncoder(str, defaultEncoder, charset, type).toLowerCase(); - } - if (type === 'value') { - return defaultEncoder(str, defaultEncoder, charset, type).toUpperCase(); - } - throw 'this should never happen! type: ' + type; - }; - - // st.deepEqual(stringify({ KeY: 'vAlUe' }, { encoder: encoder }), 'key=VALUE'); - expect(stringify({ KeY: 'vAlUe' }, { encoder: encoder })).toBe('key=VALUE'); - }); - - test('objects inside arrays', function () { - var obj = { a: { b: { c: 'd', e: 'f' } } }; - var withArray = { a: { b: [{ c: 'd', e: 'f' }] } }; - - // st.equal( - // stringify(obj, { encode: false }), - // 'a[b][c]=d&a[b][e]=f', - // 'no array, no arrayFormat', - // ); - // st.equal( - // stringify(obj, { encode: false, arrayFormat: 'brackets' }), - // 'a[b][c]=d&a[b][e]=f', - // 'no array, bracket', - // ); - // st.equal( - // stringify(obj, { encode: false, arrayFormat: 'indices' }), - // 'a[b][c]=d&a[b][e]=f', - // 'no array, indices', - // ); - // st.equal( - // stringify(obj, { encode: false, arrayFormat: 'repeat' }), - // 'a[b][c]=d&a[b][e]=f', - // 'no array, repeat', - // ); - // st.equal( - // stringify(obj, { encode: false, arrayFormat: 'comma' }), - // 'a[b][c]=d&a[b][e]=f', - // 'no array, comma', - // ); - expect(stringify(obj, { encode: false })).toBe('a[b][c]=d&a[b][e]=f'); - expect(stringify(obj, { encode: false, arrayFormat: 'brackets' })).toBe('a[b][c]=d&a[b][e]=f'); - expect(stringify(obj, { encode: false, arrayFormat: 'indices' })).toBe('a[b][c]=d&a[b][e]=f'); - expect(stringify(obj, { encode: false, arrayFormat: 'repeat' })).toBe('a[b][c]=d&a[b][e]=f'); - expect(stringify(obj, { encode: false, arrayFormat: 'comma' })).toBe('a[b][c]=d&a[b][e]=f'); - - // st.equal( - // stringify(withArray, { encode: false }), - // 'a[b][0][c]=d&a[b][0][e]=f', - // 'array, no arrayFormat', - // ); - // st.equal( - // stringify(withArray, { encode: false, arrayFormat: 'brackets' }), - // 'a[b][][c]=d&a[b][][e]=f', - // 'array, bracket', - // ); - // st.equal( - // stringify(withArray, { encode: false, arrayFormat: 'indices' }), - // 'a[b][0][c]=d&a[b][0][e]=f', - // 'array, indices', - // ); - // st.equal( - // stringify(withArray, { encode: false, arrayFormat: 
'repeat' }), - // 'a[b][c]=d&a[b][e]=f', - // 'array, repeat', - // ); - // st.equal( - // stringify(withArray, { encode: false, arrayFormat: 'comma' }), - // '???', - // 'array, comma', - // { skip: 'TODO: figure out what this should do' }, - // ); - expect(stringify(withArray, { encode: false })).toBe('a[b][0][c]=d&a[b][0][e]=f'); - expect(stringify(withArray, { encode: false, arrayFormat: 'brackets' })).toBe('a[b][][c]=d&a[b][][e]=f'); - expect(stringify(withArray, { encode: false, arrayFormat: 'indices' })).toBe('a[b][0][c]=d&a[b][0][e]=f'); - expect(stringify(withArray, { encode: false, arrayFormat: 'repeat' })).toBe('a[b][c]=d&a[b][e]=f'); - // !TODo: Figure out what this should do - // expect(stringify(withArray, { encode: false, arrayFormat: 'comma' })).toBe( - // 'a[b][c]=d&a[b][e]=f', - // ); - }); - - test('stringifies sparse arrays', function () { - // st.equal( - // stringify({ a: [, '2', , , '1'] }, { encodeValuesOnly: true, arrayFormat: 'indices' }), - // 'a[1]=2&a[4]=1', - // ); - // st.equal( - // stringify({ a: [, '2', , , '1'] }, { encodeValuesOnly: true, arrayFormat: 'brackets' }), - // 'a[]=2&a[]=1', - // ); - // st.equal( - // stringify({ a: [, '2', , , '1'] }, { encodeValuesOnly: true, arrayFormat: 'repeat' }), - // 'a=2&a=1', - // ); - expect(stringify({ a: [, '2', , , '1'] }, { encodeValuesOnly: true, arrayFormat: 'indices' })).toBe( - 'a[1]=2&a[4]=1', - ); - expect(stringify({ a: [, '2', , , '1'] }, { encodeValuesOnly: true, arrayFormat: 'brackets' })).toBe( - 'a[]=2&a[]=1', - ); - expect(stringify({ a: [, '2', , , '1'] }, { encodeValuesOnly: true, arrayFormat: 'repeat' })).toBe( - 'a=2&a=1', - ); - - // st.equal( - // stringify( - // { a: [, { b: [, , { c: '1' }] }] }, - // { encodeValuesOnly: true, arrayFormat: 'indices' }, - // ), - // 'a[1][b][2][c]=1', - // ); - // st.equal( - // stringify( - // { a: [, { b: [, , { c: '1' }] }] }, - // { encodeValuesOnly: true, arrayFormat: 'brackets' }, - // ), - // 'a[][b][][c]=1', - // ); - // st.equal( - // stringify( - // { a: [, { b: [, , { c: '1' }] }] }, - // { encodeValuesOnly: true, arrayFormat: 'repeat' }, - // ), - // 'a[b][c]=1', - // ); - expect( - stringify({ a: [, { b: [, , { c: '1' }] }] }, { encodeValuesOnly: true, arrayFormat: 'indices' }), - ).toBe('a[1][b][2][c]=1'); - expect( - stringify({ a: [, { b: [, , { c: '1' }] }] }, { encodeValuesOnly: true, arrayFormat: 'brackets' }), - ).toBe('a[][b][][c]=1'); - expect( - stringify({ a: [, { b: [, , { c: '1' }] }] }, { encodeValuesOnly: true, arrayFormat: 'repeat' }), - ).toBe('a[b][c]=1'); - - // st.equal( - // stringify( - // { a: [, [, , [, , , { c: '1' }]]] }, - // { encodeValuesOnly: true, arrayFormat: 'indices' }, - // ), - // 'a[1][2][3][c]=1', - // ); - // st.equal( - // stringify( - // { a: [, [, , [, , , { c: '1' }]]] }, - // { encodeValuesOnly: true, arrayFormat: 'brackets' }, - // ), - // 'a[][][][c]=1', - // ); - // st.equal( - // stringify( - // { a: [, [, , [, , , { c: '1' }]]] }, - // { encodeValuesOnly: true, arrayFormat: 'repeat' }, - // ), - // 'a[c]=1', - // ); - expect( - stringify({ a: [, [, , [, , , { c: '1' }]]] }, { encodeValuesOnly: true, arrayFormat: 'indices' }), - ).toBe('a[1][2][3][c]=1'); - expect( - stringify({ a: [, [, , [, , , { c: '1' }]]] }, { encodeValuesOnly: true, arrayFormat: 'brackets' }), - ).toBe('a[][][][c]=1'); - expect( - stringify({ a: [, [, , [, , , { c: '1' }]]] }, { encodeValuesOnly: true, arrayFormat: 'repeat' }), - ).toBe('a[c]=1'); - - // st.equal( - // stringify( - // { a: [, [, , [, , , { c: [, '1'] }]]] }, 
- // { encodeValuesOnly: true, arrayFormat: 'indices' }, - // ), - // 'a[1][2][3][c][1]=1', - // ); - // st.equal( - // stringify( - // { a: [, [, , [, , , { c: [, '1'] }]]] }, - // { encodeValuesOnly: true, arrayFormat: 'brackets' }, - // ), - // 'a[][][][c][]=1', - // ); - // st.equal( - // stringify( - // { a: [, [, , [, , , { c: [, '1'] }]]] }, - // { encodeValuesOnly: true, arrayFormat: 'repeat' }, - // ), - // 'a[c]=1', - // ); - expect( - stringify({ a: [, [, , [, , , { c: [, '1'] }]]] }, { encodeValuesOnly: true, arrayFormat: 'indices' }), - ).toBe('a[1][2][3][c][1]=1'); - expect( - stringify({ a: [, [, , [, , , { c: [, '1'] }]]] }, { encodeValuesOnly: true, arrayFormat: 'brackets' }), - ).toBe('a[][][][c][]=1'); - expect( - stringify({ a: [, [, , [, , , { c: [, '1'] }]]] }, { encodeValuesOnly: true, arrayFormat: 'repeat' }), - ).toBe('a[c]=1'); - }); - - test('encodes a very long string', function () { - var chars = []; - var expected = []; - for (var i = 0; i < 5e3; i++) { - chars.push(' ' + i); - - expected.push('%20' + i); - } - - var obj = { - foo: chars.join(''), - }; - - // st.equal( - // stringify(obj, { arrayFormat: 'bracket', charset: 'utf-8' }), - // 'foo=' + expected.join(''), - // ); - // @ts-expect-error - expect(stringify(obj, { arrayFormat: 'bracket', charset: 'utf-8' })).toBe('foo=' + expected.join('')); - }); -}); - -describe('stringifies empty keys', function () { - empty_test_cases.forEach(function (testCase) { - test('stringifies an object with empty string key with ' + testCase.input, function () { - // st.deepEqual( - // stringify(testCase.withEmptyKeys, { encode: false, arrayFormat: 'indices' }), - // testCase.stringifyOutput.indices, - // 'test case: ' + testCase.input + ', indices', - // ); - // st.deepEqual( - // stringify(testCase.withEmptyKeys, { encode: false, arrayFormat: 'brackets' }), - // testCase.stringifyOutput.brackets, - // 'test case: ' + testCase.input + ', brackets', - // ); - // st.deepEqual( - // stringify(testCase.withEmptyKeys, { encode: false, arrayFormat: 'repeat' }), - // testCase.stringifyOutput.repeat, - // 'test case: ' + testCase.input + ', repeat', - // ); - expect(stringify(testCase.with_empty_keys, { encode: false, arrayFormat: 'indices' })).toBe( - testCase.stringify_output.indices, - ); - expect(stringify(testCase.with_empty_keys, { encode: false, arrayFormat: 'brackets' })).toBe( - testCase.stringify_output.brackets, - ); - expect(stringify(testCase.with_empty_keys, { encode: false, arrayFormat: 'repeat' })).toBe( - testCase.stringify_output.repeat, - ); - }); - }); - - test('edge case with object/arrays', function () { - // st.deepEqual(stringify({ '': { '': [2, 3] } }, { encode: false }), '[][0]=2&[][1]=3'); - // st.deepEqual( - // stringify({ '': { '': [2, 3], a: 2 } }, { encode: false }), - // '[][0]=2&[][1]=3&[a]=2', - // ); - // st.deepEqual( - // stringify({ '': { '': [2, 3] } }, { encode: false, arrayFormat: 'indices' }), - // '[][0]=2&[][1]=3', - // ); - // st.deepEqual( - // stringify({ '': { '': [2, 3], a: 2 } }, { encode: false, arrayFormat: 'indices' }), - // '[][0]=2&[][1]=3&[a]=2', - // ); - expect(stringify({ '': { '': [2, 3] } }, { encode: false })).toBe('[][0]=2&[][1]=3'); - expect(stringify({ '': { '': [2, 3], a: 2 } }, { encode: false })).toBe('[][0]=2&[][1]=3&[a]=2'); - expect(stringify({ '': { '': [2, 3] } }, { encode: false, arrayFormat: 'indices' })).toBe( - '[][0]=2&[][1]=3', - ); - expect(stringify({ '': { '': [2, 3], a: 2 } }, { encode: false, arrayFormat: 'indices' })).toBe( - 
'[][0]=2&[][1]=3&[a]=2', - ); - }); -}); diff --git a/tests/qs/utils.test.ts b/tests/qs/utils.test.ts deleted file mode 100644 index 77f1c90..0000000 --- a/tests/qs/utils.test.ts +++ /dev/null @@ -1,169 +0,0 @@ -import { combine, merge, is_buffer, assign_single_source } from 'brapi/internal/qs/utils'; - -describe('merge()', function () { - // t.deepEqual(merge(null, true), [null, true], 'merges true into null'); - expect(merge(null, true)).toEqual([null, true]); - - // t.deepEqual(merge(null, [42]), [null, 42], 'merges null into an array'); - expect(merge(null, [42])).toEqual([null, 42]); - - // t.deepEqual( - // merge({ a: 'b' }, { a: 'c' }), - // { a: ['b', 'c'] }, - // 'merges two objects with the same key', - // ); - expect(merge({ a: 'b' }, { a: 'c' })).toEqual({ a: ['b', 'c'] }); - - var oneMerged = merge({ foo: 'bar' }, { foo: { first: '123' } }); - // t.deepEqual( - // oneMerged, - // { foo: ['bar', { first: '123' }] }, - // 'merges a standalone and an object into an array', - // ); - expect(oneMerged).toEqual({ foo: ['bar', { first: '123' }] }); - - var twoMerged = merge({ foo: ['bar', { first: '123' }] }, { foo: { second: '456' } }); - // t.deepEqual( - // twoMerged, - // { foo: { 0: 'bar', 1: { first: '123' }, second: '456' } }, - // 'merges a standalone and two objects into an array', - // ); - expect(twoMerged).toEqual({ foo: { 0: 'bar', 1: { first: '123' }, second: '456' } }); - - var sandwiched = merge({ foo: ['bar', { first: '123', second: '456' }] }, { foo: 'baz' }); - // t.deepEqual( - // sandwiched, - // { foo: ['bar', { first: '123', second: '456' }, 'baz'] }, - // 'merges an object sandwiched by two standalones into an array', - // ); - expect(sandwiched).toEqual({ foo: ['bar', { first: '123', second: '456' }, 'baz'] }); - - var nestedArrays = merge({ foo: ['baz'] }, { foo: ['bar', 'xyzzy'] }); - // t.deepEqual(nestedArrays, { foo: ['baz', 'bar', 'xyzzy'] }); - expect(nestedArrays).toEqual({ foo: ['baz', 'bar', 'xyzzy'] }); - - var noOptionsNonObjectSource = merge({ foo: 'baz' }, 'bar'); - // t.deepEqual(noOptionsNonObjectSource, { foo: 'baz', bar: true }); - expect(noOptionsNonObjectSource).toEqual({ foo: 'baz', bar: true }); - - (typeof Object.defineProperty !== 'function' ? 
test.skip : test)( - 'avoids invoking array setters unnecessarily', - function () { - var setCount = 0; - var getCount = 0; - var observed: any[] = []; - Object.defineProperty(observed, 0, { - get: function () { - getCount += 1; - return { bar: 'baz' }; - }, - set: function () { - setCount += 1; - }, - }); - merge(observed, [null]); - // st.equal(setCount, 0); - // st.equal(getCount, 1); - expect(setCount).toEqual(0); - expect(getCount).toEqual(1); - observed[0] = observed[0]; - // st.equal(setCount, 1); - // st.equal(getCount, 2); - expect(setCount).toEqual(1); - expect(getCount).toEqual(2); - }, - ); -}); - -test('assign()', function () { - var target = { a: 1, b: 2 }; - var source = { b: 3, c: 4 }; - var result = assign_single_source(target, source); - - expect(result).toEqual(target); - expect(target).toEqual({ a: 1, b: 3, c: 4 }); - expect(source).toEqual({ b: 3, c: 4 }); -}); - -describe('combine()', function () { - test('both arrays', function () { - var a = [1]; - var b = [2]; - var combined = combine(a, b); - - // st.deepEqual(a, [1], 'a is not mutated'); - // st.deepEqual(b, [2], 'b is not mutated'); - // st.notEqual(a, combined, 'a !== combined'); - // st.notEqual(b, combined, 'b !== combined'); - // st.deepEqual(combined, [1, 2], 'combined is a + b'); - expect(a).toEqual([1]); - expect(b).toEqual([2]); - expect(combined).toEqual([1, 2]); - expect(a).not.toEqual(combined); - expect(b).not.toEqual(combined); - }); - - test('one array, one non-array', function () { - var aN = 1; - var a = [aN]; - var bN = 2; - var b = [bN]; - - var combinedAnB = combine(aN, b); - // st.deepEqual(b, [bN], 'b is not mutated'); - // st.notEqual(aN, combinedAnB, 'aN + b !== aN'); - // st.notEqual(a, combinedAnB, 'aN + b !== a'); - // st.notEqual(bN, combinedAnB, 'aN + b !== bN'); - // st.notEqual(b, combinedAnB, 'aN + b !== b'); - // st.deepEqual([1, 2], combinedAnB, 'first argument is array-wrapped when not an array'); - expect(b).toEqual([bN]); - expect(combinedAnB).not.toEqual(aN); - expect(combinedAnB).not.toEqual(a); - expect(combinedAnB).not.toEqual(bN); - expect(combinedAnB).not.toEqual(b); - expect(combinedAnB).toEqual([1, 2]); - - var combinedABn = combine(a, bN); - // st.deepEqual(a, [aN], 'a is not mutated'); - // st.notEqual(aN, combinedABn, 'a + bN !== aN'); - // st.notEqual(a, combinedABn, 'a + bN !== a'); - // st.notEqual(bN, combinedABn, 'a + bN !== bN'); - // st.notEqual(b, combinedABn, 'a + bN !== b'); - // st.deepEqual([1, 2], combinedABn, 'second argument is array-wrapped when not an array'); - expect(a).toEqual([aN]); - expect(combinedABn).not.toEqual(aN); - expect(combinedABn).not.toEqual(a); - expect(combinedABn).not.toEqual(bN); - expect(combinedABn).not.toEqual(b); - expect(combinedABn).toEqual([1, 2]); - }); - - test('neither is an array', function () { - var combined = combine(1, 2); - // st.notEqual(1, combined, '1 + 2 !== 1'); - // st.notEqual(2, combined, '1 + 2 !== 2'); - // st.deepEqual([1, 2], combined, 'both arguments are array-wrapped when not an array'); - expect(combined).not.toEqual(1); - expect(combined).not.toEqual(2); - expect(combined).toEqual([1, 2]); - }); -}); - -test('is_buffer()', function () { - for (const x of [null, undefined, true, false, '', 'abc', 42, 0, NaN, {}, [], function () {}, /a/g]) { - // t.equal(is_buffer(x), false, inspect(x) + ' is not a buffer'); - expect(is_buffer(x)).toEqual(false); - } - - var fakeBuffer = { constructor: Buffer }; - // t.equal(is_buffer(fakeBuffer), false, 'fake buffer is not a buffer'); - 
expect(is_buffer(fakeBuffer)).toEqual(false); - - var saferBuffer = Buffer.from('abc'); - // t.equal(is_buffer(saferBuffer), true, 'SaferBuffer instance is a buffer'); - expect(is_buffer(saferBuffer)).toEqual(true); - - var buffer = Buffer.from('abc'); - // t.equal(is_buffer(buffer), true, 'real Buffer instance is a buffer'); - expect(is_buffer(buffer)).toEqual(true); -}); diff --git a/tests/sample_file.txt b/tests/sample_file.txt new file mode 100644 index 0000000..af5626b --- /dev/null +++ b/tests/sample_file.txt @@ -0,0 +1 @@ +Hello, world! diff --git a/tests/stringifyQuery.test.ts b/tests/stringifyQuery.test.ts deleted file mode 100644 index ec8010d..0000000 --- a/tests/stringifyQuery.test.ts +++ /dev/null @@ -1,23 +0,0 @@ -// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -import { Brapi } from 'brapi'; - -const { stringifyQuery } = Brapi.prototype as any; - -describe(stringifyQuery, () => { - for (const [input, expected] of [ - [{ a: '1', b: 2, c: true }, 'a=1&b=2&c=true'], - [{ a: null, b: false, c: undefined }, 'a=&b=false'], - [{ 'a/b': 1.28341 }, `${encodeURIComponent('a/b')}=1.28341`], - [ - { 'a/b': 'c/d', 'e=f': 'g&h' }, - `${encodeURIComponent('a/b')}=${encodeURIComponent('c/d')}&${encodeURIComponent( - 'e=f', - )}=${encodeURIComponent('g&h')}`, - ], - ]) { - it(`${JSON.stringify(input)} -> ${expected}`, () => { - expect(stringifyQuery(input)).toEqual(expected); - }); - } -}); diff --git a/tests/test_client.py b/tests/test_client.py new file mode 100644 index 0000000..0ef232e --- /dev/null +++ b/tests/test_client.py @@ -0,0 +1,1699 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +import gc +import os +import sys +import json +import asyncio +import inspect +import tracemalloc +from typing import Any, Union, cast +from unittest import mock +from typing_extensions import Literal + +import httpx +import pytest +from respx import MockRouter +from pydantic import ValidationError + +from brapi import Brapi, AsyncBrapi, APIResponseValidationError +from brapi._types import Omit +from brapi._utils import asyncify +from brapi._models import BaseModel, FinalRequestOptions +from brapi._exceptions import BrapiError, APIStatusError, APITimeoutError, APIResponseValidationError +from brapi._base_client import ( + DEFAULT_TIMEOUT, + HTTPX_DEFAULT_TIMEOUT, + BaseClient, + OtherPlatform, + DefaultHttpxClient, + DefaultAsyncHttpxClient, + get_platform, + make_request_options, +) + +from .utils import update_env + +base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010") +api_key = "My API Key" + + +def _get_params(client: BaseClient[Any, Any]) -> dict[str, str]: + request = client._build_request(FinalRequestOptions(method="get", url="/foo")) + url = httpx.URL(request.url) + return dict(url.params) + + +def _low_retry_timeout(*_args: Any, **_kwargs: Any) -> float: + return 0.1 + + +def _get_open_connections(client: Brapi | AsyncBrapi) -> int: + transport = client._client._transport + assert isinstance(transport, httpx.HTTPTransport) or isinstance(transport, httpx.AsyncHTTPTransport) + + pool = transport._pool + return len(pool._requests) + + +class TestBrapi: + client = Brapi(base_url=base_url, api_key=api_key, _strict_response_validation=True) + + @pytest.mark.respx(base_url=base_url) + def test_raw_response(self, respx_mock: MockRouter) -> None: + respx_mock.post("/foo").mock(return_value=httpx.Response(200, json={"foo": "bar"})) + + response = 
self.client.post("/foo", cast_to=httpx.Response) + assert response.status_code == 200 + assert isinstance(response, httpx.Response) + assert response.json() == {"foo": "bar"} + + @pytest.mark.respx(base_url=base_url) + def test_raw_response_for_binary(self, respx_mock: MockRouter) -> None: + respx_mock.post("/foo").mock( + return_value=httpx.Response(200, headers={"Content-Type": "application/binary"}, content='{"foo": "bar"}') + ) + + response = self.client.post("/foo", cast_to=httpx.Response) + assert response.status_code == 200 + assert isinstance(response, httpx.Response) + assert response.json() == {"foo": "bar"} + + def test_copy(self) -> None: + copied = self.client.copy() + assert id(copied) != id(self.client) + + copied = self.client.copy(api_key="another My API Key") + assert copied.api_key == "another My API Key" + assert self.client.api_key == "My API Key" + + def test_copy_default_options(self) -> None: + # options that have a default are overridden correctly + copied = self.client.copy(max_retries=7) + assert copied.max_retries == 7 + assert self.client.max_retries == 2 + + copied2 = copied.copy(max_retries=6) + assert copied2.max_retries == 6 + assert copied.max_retries == 7 + + # timeout + assert isinstance(self.client.timeout, httpx.Timeout) + copied = self.client.copy(timeout=None) + assert copied.timeout is None + assert isinstance(self.client.timeout, httpx.Timeout) + + def test_copy_default_headers(self) -> None: + client = Brapi( + base_url=base_url, api_key=api_key, _strict_response_validation=True, default_headers={"X-Foo": "bar"} + ) + assert client.default_headers["X-Foo"] == "bar" + + # does not override the already given value when not specified + copied = client.copy() + assert copied.default_headers["X-Foo"] == "bar" + + # merges already given headers + copied = client.copy(default_headers={"X-Bar": "stainless"}) + assert copied.default_headers["X-Foo"] == "bar" + assert copied.default_headers["X-Bar"] == "stainless" + + # uses new values for any already given headers + copied = client.copy(default_headers={"X-Foo": "stainless"}) + assert copied.default_headers["X-Foo"] == "stainless" + + # set_default_headers + + # completely overrides already set values + copied = client.copy(set_default_headers={}) + assert copied.default_headers.get("X-Foo") is None + + copied = client.copy(set_default_headers={"X-Bar": "Robert"}) + assert copied.default_headers["X-Bar"] == "Robert" + + with pytest.raises( + ValueError, + match="`default_headers` and `set_default_headers` arguments are mutually exclusive", + ): + client.copy(set_default_headers={}, default_headers={"X-Foo": "Bar"}) + + def test_copy_default_query(self) -> None: + client = Brapi( + base_url=base_url, api_key=api_key, _strict_response_validation=True, default_query={"foo": "bar"} + ) + assert _get_params(client)["foo"] == "bar" + + # does not override the already given value when not specified + copied = client.copy() + assert _get_params(copied)["foo"] == "bar" + + # merges already given params + copied = client.copy(default_query={"bar": "stainless"}) + params = _get_params(copied) + assert params["foo"] == "bar" + assert params["bar"] == "stainless" + + # uses new values for any already given headers + copied = client.copy(default_query={"foo": "stainless"}) + assert _get_params(copied)["foo"] == "stainless" + + # set_default_query + + # completely overrides already set values + copied = client.copy(set_default_query={}) + assert _get_params(copied) == {} + + copied = client.copy(set_default_query={"bar": 
"Robert"}) + assert _get_params(copied)["bar"] == "Robert" + + with pytest.raises( + ValueError, + # TODO: update + match="`default_query` and `set_default_query` arguments are mutually exclusive", + ): + client.copy(set_default_query={}, default_query={"foo": "Bar"}) + + def test_copy_signature(self) -> None: + # ensure the same parameters that can be passed to the client are defined in the `.copy()` method + init_signature = inspect.signature( + # mypy doesn't like that we access the `__init__` property. + self.client.__init__, # type: ignore[misc] + ) + copy_signature = inspect.signature(self.client.copy) + exclude_params = {"transport", "proxies", "_strict_response_validation"} + + for name in init_signature.parameters.keys(): + if name in exclude_params: + continue + + copy_param = copy_signature.parameters.get(name) + assert copy_param is not None, f"copy() signature is missing the {name} param" + + @pytest.mark.skipif(sys.version_info >= (3, 10), reason="fails because of a memory leak that started from 3.12") + def test_copy_build_request(self) -> None: + options = FinalRequestOptions(method="get", url="/foo") + + def build_request(options: FinalRequestOptions) -> None: + client = self.client.copy() + client._build_request(options) + + # ensure that the machinery is warmed up before tracing starts. + build_request(options) + gc.collect() + + tracemalloc.start(1000) + + snapshot_before = tracemalloc.take_snapshot() + + ITERATIONS = 10 + for _ in range(ITERATIONS): + build_request(options) + + gc.collect() + snapshot_after = tracemalloc.take_snapshot() + + tracemalloc.stop() + + def add_leak(leaks: list[tracemalloc.StatisticDiff], diff: tracemalloc.StatisticDiff) -> None: + if diff.count == 0: + # Avoid false positives by considering only leaks (i.e. allocations that persist). + return + + if diff.count % ITERATIONS != 0: + # Avoid false positives by considering only leaks that appear per iteration. + return + + for frame in diff.traceback: + if any( + frame.filename.endswith(fragment) + for fragment in [ + # to_raw_response_wrapper leaks through the @functools.wraps() decorator. + # + # removing the decorator fixes the leak for reasons we don't understand. + "brapi/_legacy_response.py", + "brapi/_response.py", + # pydantic.BaseModel.model_dump || pydantic.BaseModel.dict leak memory for some reason. + "brapi/_compat.py", + # Standard library leaks we don't care about. 
+ "/logging/__init__.py", + ] + ): + return + + leaks.append(diff) + + leaks: list[tracemalloc.StatisticDiff] = [] + for diff in snapshot_after.compare_to(snapshot_before, "traceback"): + add_leak(leaks, diff) + if leaks: + for leak in leaks: + print("MEMORY LEAK:", leak) + for frame in leak.traceback: + print(frame) + raise AssertionError() + + def test_request_timeout(self) -> None: + request = self.client._build_request(FinalRequestOptions(method="get", url="/foo")) + timeout = httpx.Timeout(**request.extensions["timeout"]) # type: ignore + assert timeout == DEFAULT_TIMEOUT + + request = self.client._build_request( + FinalRequestOptions(method="get", url="/foo", timeout=httpx.Timeout(100.0)) + ) + timeout = httpx.Timeout(**request.extensions["timeout"]) # type: ignore + assert timeout == httpx.Timeout(100.0) + + def test_client_timeout_option(self) -> None: + client = Brapi(base_url=base_url, api_key=api_key, _strict_response_validation=True, timeout=httpx.Timeout(0)) + + request = client._build_request(FinalRequestOptions(method="get", url="/foo")) + timeout = httpx.Timeout(**request.extensions["timeout"]) # type: ignore + assert timeout == httpx.Timeout(0) + + def test_http_client_timeout_option(self) -> None: + # custom timeout given to the httpx client should be used + with httpx.Client(timeout=None) as http_client: + client = Brapi( + base_url=base_url, api_key=api_key, _strict_response_validation=True, http_client=http_client + ) + + request = client._build_request(FinalRequestOptions(method="get", url="/foo")) + timeout = httpx.Timeout(**request.extensions["timeout"]) # type: ignore + assert timeout == httpx.Timeout(None) + + # no timeout given to the httpx client should not use the httpx default + with httpx.Client() as http_client: + client = Brapi( + base_url=base_url, api_key=api_key, _strict_response_validation=True, http_client=http_client + ) + + request = client._build_request(FinalRequestOptions(method="get", url="/foo")) + timeout = httpx.Timeout(**request.extensions["timeout"]) # type: ignore + assert timeout == DEFAULT_TIMEOUT + + # explicitly passing the default timeout currently results in it being ignored + with httpx.Client(timeout=HTTPX_DEFAULT_TIMEOUT) as http_client: + client = Brapi( + base_url=base_url, api_key=api_key, _strict_response_validation=True, http_client=http_client + ) + + request = client._build_request(FinalRequestOptions(method="get", url="/foo")) + timeout = httpx.Timeout(**request.extensions["timeout"]) # type: ignore + assert timeout == DEFAULT_TIMEOUT # our default + + async def test_invalid_http_client(self) -> None: + with pytest.raises(TypeError, match="Invalid `http_client` arg"): + async with httpx.AsyncClient() as http_client: + Brapi( + base_url=base_url, + api_key=api_key, + _strict_response_validation=True, + http_client=cast(Any, http_client), + ) + + def test_default_headers_option(self) -> None: + client = Brapi( + base_url=base_url, api_key=api_key, _strict_response_validation=True, default_headers={"X-Foo": "bar"} + ) + request = client._build_request(FinalRequestOptions(method="get", url="/foo")) + assert request.headers.get("x-foo") == "bar" + assert request.headers.get("x-stainless-lang") == "python" + + client2 = Brapi( + base_url=base_url, + api_key=api_key, + _strict_response_validation=True, + default_headers={ + "X-Foo": "stainless", + "X-Stainless-Lang": "my-overriding-header", + }, + ) + request = client2._build_request(FinalRequestOptions(method="get", url="/foo")) + assert request.headers.get("x-foo") == 
"stainless" + assert request.headers.get("x-stainless-lang") == "my-overriding-header" + + def test_validate_headers(self) -> None: + client = Brapi(base_url=base_url, api_key=api_key, _strict_response_validation=True) + request = client._build_request(FinalRequestOptions(method="get", url="/foo")) + assert request.headers.get("Authorization") == f"Bearer {api_key}" + + with pytest.raises(BrapiError): + with update_env(**{"BRAPI_API_KEY": Omit()}): + client2 = Brapi(base_url=base_url, api_key=None, _strict_response_validation=True) + _ = client2 + + def test_default_query_option(self) -> None: + client = Brapi( + base_url=base_url, api_key=api_key, _strict_response_validation=True, default_query={"query_param": "bar"} + ) + request = client._build_request(FinalRequestOptions(method="get", url="/foo")) + url = httpx.URL(request.url) + assert dict(url.params) == {"query_param": "bar"} + + request = client._build_request( + FinalRequestOptions( + method="get", + url="/foo", + params={"foo": "baz", "query_param": "overridden"}, + ) + ) + url = httpx.URL(request.url) + assert dict(url.params) == {"foo": "baz", "query_param": "overridden"} + + def test_request_extra_json(self) -> None: + request = self.client._build_request( + FinalRequestOptions( + method="post", + url="/foo", + json_data={"foo": "bar"}, + extra_json={"baz": False}, + ), + ) + data = json.loads(request.content.decode("utf-8")) + assert data == {"foo": "bar", "baz": False} + + request = self.client._build_request( + FinalRequestOptions( + method="post", + url="/foo", + extra_json={"baz": False}, + ), + ) + data = json.loads(request.content.decode("utf-8")) + assert data == {"baz": False} + + # `extra_json` takes priority over `json_data` when keys clash + request = self.client._build_request( + FinalRequestOptions( + method="post", + url="/foo", + json_data={"foo": "bar", "baz": True}, + extra_json={"baz": None}, + ), + ) + data = json.loads(request.content.decode("utf-8")) + assert data == {"foo": "bar", "baz": None} + + def test_request_extra_headers(self) -> None: + request = self.client._build_request( + FinalRequestOptions( + method="post", + url="/foo", + **make_request_options(extra_headers={"X-Foo": "Foo"}), + ), + ) + assert request.headers.get("X-Foo") == "Foo" + + # `extra_headers` takes priority over `default_headers` when keys clash + request = self.client.with_options(default_headers={"X-Bar": "true"})._build_request( + FinalRequestOptions( + method="post", + url="/foo", + **make_request_options( + extra_headers={"X-Bar": "false"}, + ), + ), + ) + assert request.headers.get("X-Bar") == "false" + + def test_request_extra_query(self) -> None: + request = self.client._build_request( + FinalRequestOptions( + method="post", + url="/foo", + **make_request_options( + extra_query={"my_query_param": "Foo"}, + ), + ), + ) + params = dict(request.url.params) + assert params == {"my_query_param": "Foo"} + + # if both `query` and `extra_query` are given, they are merged + request = self.client._build_request( + FinalRequestOptions( + method="post", + url="/foo", + **make_request_options( + query={"bar": "1"}, + extra_query={"foo": "2"}, + ), + ), + ) + params = dict(request.url.params) + assert params == {"bar": "1", "foo": "2"} + + # `extra_query` takes priority over `query` when keys clash + request = self.client._build_request( + FinalRequestOptions( + method="post", + url="/foo", + **make_request_options( + query={"foo": "1"}, + extra_query={"foo": "2"}, + ), + ), + ) + params = dict(request.url.params) + assert params == 
{"foo": "2"} + + def test_multipart_repeating_array(self, client: Brapi) -> None: + request = client._build_request( + FinalRequestOptions.construct( + method="post", + url="/foo", + headers={"Content-Type": "multipart/form-data; boundary=6b7ba517decee4a450543ea6ae821c82"}, + json_data={"array": ["foo", "bar"]}, + files=[("foo.txt", b"hello world")], + ) + ) + + assert request.read().split(b"\r\n") == [ + b"--6b7ba517decee4a450543ea6ae821c82", + b'Content-Disposition: form-data; name="array[]"', + b"", + b"foo", + b"--6b7ba517decee4a450543ea6ae821c82", + b'Content-Disposition: form-data; name="array[]"', + b"", + b"bar", + b"--6b7ba517decee4a450543ea6ae821c82", + b'Content-Disposition: form-data; name="foo.txt"; filename="upload"', + b"Content-Type: application/octet-stream", + b"", + b"hello world", + b"--6b7ba517decee4a450543ea6ae821c82--", + b"", + ] + + @pytest.mark.respx(base_url=base_url) + def test_basic_union_response(self, respx_mock: MockRouter) -> None: + class Model1(BaseModel): + name: str + + class Model2(BaseModel): + foo: str + + respx_mock.get("/foo").mock(return_value=httpx.Response(200, json={"foo": "bar"})) + + response = self.client.get("/foo", cast_to=cast(Any, Union[Model1, Model2])) + assert isinstance(response, Model2) + assert response.foo == "bar" + + @pytest.mark.respx(base_url=base_url) + def test_union_response_different_types(self, respx_mock: MockRouter) -> None: + """Union of objects with the same field name using a different type""" + + class Model1(BaseModel): + foo: int + + class Model2(BaseModel): + foo: str + + respx_mock.get("/foo").mock(return_value=httpx.Response(200, json={"foo": "bar"})) + + response = self.client.get("/foo", cast_to=cast(Any, Union[Model1, Model2])) + assert isinstance(response, Model2) + assert response.foo == "bar" + + respx_mock.get("/foo").mock(return_value=httpx.Response(200, json={"foo": 1})) + + response = self.client.get("/foo", cast_to=cast(Any, Union[Model1, Model2])) + assert isinstance(response, Model1) + assert response.foo == 1 + + @pytest.mark.respx(base_url=base_url) + def test_non_application_json_content_type_for_json_data(self, respx_mock: MockRouter) -> None: + """ + Response that sets Content-Type to something other than application/json but returns json data + """ + + class Model(BaseModel): + foo: int + + respx_mock.get("/foo").mock( + return_value=httpx.Response( + 200, + content=json.dumps({"foo": 2}), + headers={"Content-Type": "application/text"}, + ) + ) + + response = self.client.get("/foo", cast_to=Model) + assert isinstance(response, Model) + assert response.foo == 2 + + def test_base_url_setter(self) -> None: + client = Brapi(base_url="https://example.com/from_init", api_key=api_key, _strict_response_validation=True) + assert client.base_url == "https://example.com/from_init/" + + client.base_url = "https://example.com/from_setter" # type: ignore[assignment] + + assert client.base_url == "https://example.com/from_setter/" + + def test_base_url_env(self) -> None: + with update_env(BRAPI_BASE_URL="http://localhost:5000/from/env"): + client = Brapi(api_key=api_key, _strict_response_validation=True) + assert client.base_url == "http://localhost:5000/from/env/" + + # explicit environment arg requires explicitness + with update_env(BRAPI_BASE_URL="http://localhost:5000/from/env"): + with pytest.raises(ValueError, match=r"you must pass base_url=None"): + Brapi(api_key=api_key, _strict_response_validation=True, environment="production") + + client = Brapi(base_url=None, api_key=api_key, 
_strict_response_validation=True, environment="production") + assert str(client.base_url).startswith("https://brapi.dev") + + @pytest.mark.parametrize( + "client", + [ + Brapi(base_url="http://localhost:5000/custom/path/", api_key=api_key, _strict_response_validation=True), + Brapi( + base_url="http://localhost:5000/custom/path/", + api_key=api_key, + _strict_response_validation=True, + http_client=httpx.Client(), + ), + ], + ids=["standard", "custom http client"], + ) + def test_base_url_trailing_slash(self, client: Brapi) -> None: + request = client._build_request( + FinalRequestOptions( + method="post", + url="/foo", + json_data={"foo": "bar"}, + ), + ) + assert request.url == "http://localhost:5000/custom/path/foo" + + @pytest.mark.parametrize( + "client", + [ + Brapi(base_url="http://localhost:5000/custom/path", api_key=api_key, _strict_response_validation=True), + Brapi( + base_url="http://localhost:5000/custom/path", + api_key=api_key, + _strict_response_validation=True, + http_client=httpx.Client(), + ), + ], + ids=["standard", "custom http client"], + ) + def test_base_url_no_trailing_slash(self, client: Brapi) -> None: + request = client._build_request( + FinalRequestOptions( + method="post", + url="/foo", + json_data={"foo": "bar"}, + ), + ) + assert request.url == "http://localhost:5000/custom/path/foo" + + @pytest.mark.parametrize( + "client", + [ + Brapi(base_url="http://localhost:5000/custom/path/", api_key=api_key, _strict_response_validation=True), + Brapi( + base_url="http://localhost:5000/custom/path/", + api_key=api_key, + _strict_response_validation=True, + http_client=httpx.Client(), + ), + ], + ids=["standard", "custom http client"], + ) + def test_absolute_request_url(self, client: Brapi) -> None: + request = client._build_request( + FinalRequestOptions( + method="post", + url="https://myapi.com/foo", + json_data={"foo": "bar"}, + ), + ) + assert request.url == "https://myapi.com/foo" + + def test_copied_client_does_not_close_http(self) -> None: + client = Brapi(base_url=base_url, api_key=api_key, _strict_response_validation=True) + assert not client.is_closed() + + copied = client.copy() + assert copied is not client + + del copied + + assert not client.is_closed() + + def test_client_context_manager(self) -> None: + client = Brapi(base_url=base_url, api_key=api_key, _strict_response_validation=True) + with client as c2: + assert c2 is client + assert not c2.is_closed() + assert not client.is_closed() + assert client.is_closed() + + @pytest.mark.respx(base_url=base_url) + def test_client_response_validation_error(self, respx_mock: MockRouter) -> None: + class Model(BaseModel): + foo: str + + respx_mock.get("/foo").mock(return_value=httpx.Response(200, json={"foo": {"invalid": True}})) + + with pytest.raises(APIResponseValidationError) as exc: + self.client.get("/foo", cast_to=Model) + + assert isinstance(exc.value.__cause__, ValidationError) + + def test_client_max_retries_validation(self) -> None: + with pytest.raises(TypeError, match=r"max_retries cannot be None"): + Brapi(base_url=base_url, api_key=api_key, _strict_response_validation=True, max_retries=cast(Any, None)) + + @pytest.mark.respx(base_url=base_url) + def test_received_text_for_expected_json(self, respx_mock: MockRouter) -> None: + class Model(BaseModel): + name: str + + respx_mock.get("/foo").mock(return_value=httpx.Response(200, text="my-custom-format")) + + strict_client = Brapi(base_url=base_url, api_key=api_key, _strict_response_validation=True) + + with
pytest.raises(APIResponseValidationError): + strict_client.get("/foo", cast_to=Model) + + client = Brapi(base_url=base_url, api_key=api_key, _strict_response_validation=False) + + response = client.get("/foo", cast_to=Model) + assert isinstance(response, str) # type: ignore[unreachable] + + @pytest.mark.parametrize( + "remaining_retries,retry_after,timeout", + [ + [3, "20", 20], + [3, "0", 0.5], + [3, "-10", 0.5], + [3, "60", 60], + [3, "61", 0.5], + [3, "Fri, 29 Sep 2023 16:26:57 GMT", 20], + [3, "Fri, 29 Sep 2023 16:26:37 GMT", 0.5], + [3, "Fri, 29 Sep 2023 16:26:27 GMT", 0.5], + [3, "Fri, 29 Sep 2023 16:27:37 GMT", 60], + [3, "Fri, 29 Sep 2023 16:27:38 GMT", 0.5], + [3, "99999999999999999999999999999999999", 0.5], + [3, "Zun, 29 Sep 2023 16:26:27 GMT", 0.5], + [3, "", 0.5], + [2, "", 0.5 * 2.0], + [1, "", 0.5 * 4.0], + [-1100, "", 8], # test large number potentially overflowing + ], + ) + @mock.patch("time.time", mock.MagicMock(return_value=1696004797)) + def test_parse_retry_after_header(self, remaining_retries: int, retry_after: str, timeout: float) -> None: + client = Brapi(base_url=base_url, api_key=api_key, _strict_response_validation=True) + + headers = httpx.Headers({"retry-after": retry_after}) + options = FinalRequestOptions(method="get", url="/foo", max_retries=3) + calculated = client._calculate_retry_timeout(remaining_retries, options, headers) + assert calculated == pytest.approx(timeout, 0.5 * 0.875) # pyright: ignore[reportUnknownMemberType] + + @mock.patch("brapi._base_client.BaseClient._calculate_retry_timeout", _low_retry_timeout) + @pytest.mark.respx(base_url=base_url) + def test_retrying_timeout_errors_doesnt_leak(self, respx_mock: MockRouter, client: Brapi) -> None: + respx_mock.get("/api/quote/PETR4,MGLU3").mock(side_effect=httpx.TimeoutException("Test timeout error")) + + with pytest.raises(APITimeoutError): + client.quote.with_streaming_response.retrieve(tickers="PETR4,MGLU3").__enter__() + + assert _get_open_connections(self.client) == 0 + + @mock.patch("brapi._base_client.BaseClient._calculate_retry_timeout", _low_retry_timeout) + @pytest.mark.respx(base_url=base_url) + def test_retrying_status_errors_doesnt_leak(self, respx_mock: MockRouter, client: Brapi) -> None: + respx_mock.get("/api/quote/PETR4,MGLU3").mock(return_value=httpx.Response(500)) + + with pytest.raises(APIStatusError): + client.quote.with_streaming_response.retrieve(tickers="PETR4,MGLU3").__enter__() + assert _get_open_connections(self.client) == 0 + + @pytest.mark.parametrize("failures_before_success", [0, 2, 4]) + @mock.patch("brapi._base_client.BaseClient._calculate_retry_timeout", _low_retry_timeout) + @pytest.mark.respx(base_url=base_url) + @pytest.mark.parametrize("failure_mode", ["status", "exception"]) + def test_retries_taken( + self, + client: Brapi, + failures_before_success: int, + failure_mode: Literal["status", "exception"], + respx_mock: MockRouter, + ) -> None: + client = client.with_options(max_retries=4) + + nb_retries = 0 + + def retry_handler(_request: httpx.Request) -> httpx.Response: + nonlocal nb_retries + if nb_retries < failures_before_success: + nb_retries += 1 + if failure_mode == "exception": + raise RuntimeError("oops") + return httpx.Response(500) + return httpx.Response(200) + + respx_mock.get("/api/quote/PETR4,MGLU3").mock(side_effect=retry_handler) + + response = client.quote.with_raw_response.retrieve(tickers="PETR4,MGLU3") + + assert response.retries_taken == failures_before_success + assert int(response.http_request.headers.get("x-stainless-retry-count")) == 
failures_before_success + + @pytest.mark.parametrize("failures_before_success", [0, 2, 4]) + @mock.patch("brapi._base_client.BaseClient._calculate_retry_timeout", _low_retry_timeout) + @pytest.mark.respx(base_url=base_url) + def test_omit_retry_count_header(self, client: Brapi, failures_before_success: int, respx_mock: MockRouter) -> None: + client = client.with_options(max_retries=4) + + nb_retries = 0 + + def retry_handler(_request: httpx.Request) -> httpx.Response: + nonlocal nb_retries + if nb_retries < failures_before_success: + nb_retries += 1 + return httpx.Response(500) + return httpx.Response(200) + + respx_mock.get("/api/quote/PETR4,MGLU3").mock(side_effect=retry_handler) + + response = client.quote.with_raw_response.retrieve( + tickers="PETR4,MGLU3", extra_headers={"x-stainless-retry-count": Omit()} + ) + + assert len(response.http_request.headers.get_list("x-stainless-retry-count")) == 0 + + @pytest.mark.parametrize("failures_before_success", [0, 2, 4]) + @mock.patch("brapi._base_client.BaseClient._calculate_retry_timeout", _low_retry_timeout) + @pytest.mark.respx(base_url=base_url) + def test_overwrite_retry_count_header( + self, client: Brapi, failures_before_success: int, respx_mock: MockRouter + ) -> None: + client = client.with_options(max_retries=4) + + nb_retries = 0 + + def retry_handler(_request: httpx.Request) -> httpx.Response: + nonlocal nb_retries + if nb_retries < failures_before_success: + nb_retries += 1 + return httpx.Response(500) + return httpx.Response(200) + + respx_mock.get("/api/quote/PETR4,MGLU3").mock(side_effect=retry_handler) + + response = client.quote.with_raw_response.retrieve( + tickers="PETR4,MGLU3", extra_headers={"x-stainless-retry-count": "42"} + ) + + assert response.http_request.headers.get("x-stainless-retry-count") == "42" + + def test_proxy_environment_variables(self, monkeypatch: pytest.MonkeyPatch) -> None: + # Test that the proxy environment variables are set correctly + monkeypatch.setenv("HTTPS_PROXY", "https://example.org") + + client = DefaultHttpxClient() + + mounts = tuple(client._mounts.items()) + assert len(mounts) == 1 + assert mounts[0][0].pattern == "https://" + + @pytest.mark.filterwarnings("ignore:.*deprecated.*:DeprecationWarning") + def test_default_client_creation(self) -> None: + # Ensure that the client can be initialized without any exceptions + DefaultHttpxClient( + verify=True, + cert=None, + trust_env=True, + http1=True, + http2=False, + limits=httpx.Limits(max_connections=100, max_keepalive_connections=20), + ) + + @pytest.mark.respx(base_url=base_url) + def test_follow_redirects(self, respx_mock: MockRouter) -> None: + # Test that the default follow_redirects=True allows following redirects + respx_mock.post("/redirect").mock( + return_value=httpx.Response(302, headers={"Location": f"{base_url}/redirected"}) + ) + respx_mock.get("/redirected").mock(return_value=httpx.Response(200, json={"status": "ok"})) + + response = self.client.post("/redirect", body={"key": "value"}, cast_to=httpx.Response) + assert response.status_code == 200 + assert response.json() == {"status": "ok"} + + @pytest.mark.respx(base_url=base_url) + def test_follow_redirects_disabled(self, respx_mock: MockRouter) -> None: + # Test that follow_redirects=False prevents following redirects + respx_mock.post("/redirect").mock( + return_value=httpx.Response(302, headers={"Location": f"{base_url}/redirected"}) + ) + + with pytest.raises(APIStatusError) as exc_info: + self.client.post( + "/redirect", body={"key": "value"}, 
options={"follow_redirects": False}, cast_to=httpx.Response + ) + + assert exc_info.value.response.status_code == 302 + assert exc_info.value.response.headers["Location"] == f"{base_url}/redirected" + + +class TestAsyncBrapi: + client = AsyncBrapi(base_url=base_url, api_key=api_key, _strict_response_validation=True) + + @pytest.mark.respx(base_url=base_url) + @pytest.mark.asyncio + async def test_raw_response(self, respx_mock: MockRouter) -> None: + respx_mock.post("/foo").mock(return_value=httpx.Response(200, json={"foo": "bar"})) + + response = await self.client.post("/foo", cast_to=httpx.Response) + assert response.status_code == 200 + assert isinstance(response, httpx.Response) + assert response.json() == {"foo": "bar"} + + @pytest.mark.respx(base_url=base_url) + @pytest.mark.asyncio + async def test_raw_response_for_binary(self, respx_mock: MockRouter) -> None: + respx_mock.post("/foo").mock( + return_value=httpx.Response(200, headers={"Content-Type": "application/binary"}, content='{"foo": "bar"}') + ) + + response = await self.client.post("/foo", cast_to=httpx.Response) + assert response.status_code == 200 + assert isinstance(response, httpx.Response) + assert response.json() == {"foo": "bar"} + + def test_copy(self) -> None: + copied = self.client.copy() + assert id(copied) != id(self.client) + + copied = self.client.copy(api_key="another My API Key") + assert copied.api_key == "another My API Key" + assert self.client.api_key == "My API Key" + + def test_copy_default_options(self) -> None: + # options that have a default are overridden correctly + copied = self.client.copy(max_retries=7) + assert copied.max_retries == 7 + assert self.client.max_retries == 2 + + copied2 = copied.copy(max_retries=6) + assert copied2.max_retries == 6 + assert copied.max_retries == 7 + + # timeout + assert isinstance(self.client.timeout, httpx.Timeout) + copied = self.client.copy(timeout=None) + assert copied.timeout is None + assert isinstance(self.client.timeout, httpx.Timeout) + + def test_copy_default_headers(self) -> None: + client = AsyncBrapi( + base_url=base_url, api_key=api_key, _strict_response_validation=True, default_headers={"X-Foo": "bar"} + ) + assert client.default_headers["X-Foo"] == "bar" + + # does not override the already given value when not specified + copied = client.copy() + assert copied.default_headers["X-Foo"] == "bar" + + # merges already given headers + copied = client.copy(default_headers={"X-Bar": "stainless"}) + assert copied.default_headers["X-Foo"] == "bar" + assert copied.default_headers["X-Bar"] == "stainless" + + # uses new values for any already given headers + copied = client.copy(default_headers={"X-Foo": "stainless"}) + assert copied.default_headers["X-Foo"] == "stainless" + + # set_default_headers + + # completely overrides already set values + copied = client.copy(set_default_headers={}) + assert copied.default_headers.get("X-Foo") is None + + copied = client.copy(set_default_headers={"X-Bar": "Robert"}) + assert copied.default_headers["X-Bar"] == "Robert" + + with pytest.raises( + ValueError, + match="`default_headers` and `set_default_headers` arguments are mutually exclusive", + ): + client.copy(set_default_headers={}, default_headers={"X-Foo": "Bar"}) + + def test_copy_default_query(self) -> None: + client = AsyncBrapi( + base_url=base_url, api_key=api_key, _strict_response_validation=True, default_query={"foo": "bar"} + ) + assert _get_params(client)["foo"] == "bar" + + # does not override the already given value when not specified + copied = 
client.copy() + assert _get_params(copied)["foo"] == "bar" + + # merges already given params + copied = client.copy(default_query={"bar": "stainless"}) + params = _get_params(copied) + assert params["foo"] == "bar" + assert params["bar"] == "stainless" + + # uses new values for any already given headers + copied = client.copy(default_query={"foo": "stainless"}) + assert _get_params(copied)["foo"] == "stainless" + + # set_default_query + + # completely overrides already set values + copied = client.copy(set_default_query={}) + assert _get_params(copied) == {} + + copied = client.copy(set_default_query={"bar": "Robert"}) + assert _get_params(copied)["bar"] == "Robert" + + with pytest.raises( + ValueError, + # TODO: update + match="`default_query` and `set_default_query` arguments are mutually exclusive", + ): + client.copy(set_default_query={}, default_query={"foo": "Bar"}) + + def test_copy_signature(self) -> None: + # ensure the same parameters that can be passed to the client are defined in the `.copy()` method + init_signature = inspect.signature( + # mypy doesn't like that we access the `__init__` property. + self.client.__init__, # type: ignore[misc] + ) + copy_signature = inspect.signature(self.client.copy) + exclude_params = {"transport", "proxies", "_strict_response_validation"} + + for name in init_signature.parameters.keys(): + if name in exclude_params: + continue + + copy_param = copy_signature.parameters.get(name) + assert copy_param is not None, f"copy() signature is missing the {name} param" + + @pytest.mark.skipif(sys.version_info >= (3, 10), reason="fails because of a memory leak that started from 3.12") + def test_copy_build_request(self) -> None: + options = FinalRequestOptions(method="get", url="/foo") + + def build_request(options: FinalRequestOptions) -> None: + client = self.client.copy() + client._build_request(options) + + # ensure that the machinery is warmed up before tracing starts. + build_request(options) + gc.collect() + + tracemalloc.start(1000) + + snapshot_before = tracemalloc.take_snapshot() + + ITERATIONS = 10 + for _ in range(ITERATIONS): + build_request(options) + + gc.collect() + snapshot_after = tracemalloc.take_snapshot() + + tracemalloc.stop() + + def add_leak(leaks: list[tracemalloc.StatisticDiff], diff: tracemalloc.StatisticDiff) -> None: + if diff.count == 0: + # Avoid false positives by considering only leaks (i.e. allocations that persist). + return + + if diff.count % ITERATIONS != 0: + # Avoid false positives by considering only leaks that appear per iteration. + return + + for frame in diff.traceback: + if any( + frame.filename.endswith(fragment) + for fragment in [ + # to_raw_response_wrapper leaks through the @functools.wraps() decorator. + # + # removing the decorator fixes the leak for reasons we don't understand. + "brapi/_legacy_response.py", + "brapi/_response.py", + # pydantic.BaseModel.model_dump || pydantic.BaseModel.dict leak memory for some reason. + "brapi/_compat.py", + # Standard library leaks we don't care about. 
+ "/logging/__init__.py", + ] + ): + return + + leaks.append(diff) + + leaks: list[tracemalloc.StatisticDiff] = [] + for diff in snapshot_after.compare_to(snapshot_before, "traceback"): + add_leak(leaks, diff) + if leaks: + for leak in leaks: + print("MEMORY LEAK:", leak) + for frame in leak.traceback: + print(frame) + raise AssertionError() + + async def test_request_timeout(self) -> None: + request = self.client._build_request(FinalRequestOptions(method="get", url="/foo")) + timeout = httpx.Timeout(**request.extensions["timeout"]) # type: ignore + assert timeout == DEFAULT_TIMEOUT + + request = self.client._build_request( + FinalRequestOptions(method="get", url="/foo", timeout=httpx.Timeout(100.0)) + ) + timeout = httpx.Timeout(**request.extensions["timeout"]) # type: ignore + assert timeout == httpx.Timeout(100.0) + + async def test_client_timeout_option(self) -> None: + client = AsyncBrapi( + base_url=base_url, api_key=api_key, _strict_response_validation=True, timeout=httpx.Timeout(0) + ) + + request = client._build_request(FinalRequestOptions(method="get", url="/foo")) + timeout = httpx.Timeout(**request.extensions["timeout"]) # type: ignore + assert timeout == httpx.Timeout(0) + + async def test_http_client_timeout_option(self) -> None: + # custom timeout given to the httpx client should be used + async with httpx.AsyncClient(timeout=None) as http_client: + client = AsyncBrapi( + base_url=base_url, api_key=api_key, _strict_response_validation=True, http_client=http_client + ) + + request = client._build_request(FinalRequestOptions(method="get", url="/foo")) + timeout = httpx.Timeout(**request.extensions["timeout"]) # type: ignore + assert timeout == httpx.Timeout(None) + + # no timeout given to the httpx client should not use the httpx default + async with httpx.AsyncClient() as http_client: + client = AsyncBrapi( + base_url=base_url, api_key=api_key, _strict_response_validation=True, http_client=http_client + ) + + request = client._build_request(FinalRequestOptions(method="get", url="/foo")) + timeout = httpx.Timeout(**request.extensions["timeout"]) # type: ignore + assert timeout == DEFAULT_TIMEOUT + + # explicitly passing the default timeout currently results in it being ignored + async with httpx.AsyncClient(timeout=HTTPX_DEFAULT_TIMEOUT) as http_client: + client = AsyncBrapi( + base_url=base_url, api_key=api_key, _strict_response_validation=True, http_client=http_client + ) + + request = client._build_request(FinalRequestOptions(method="get", url="/foo")) + timeout = httpx.Timeout(**request.extensions["timeout"]) # type: ignore + assert timeout == DEFAULT_TIMEOUT # our default + + def test_invalid_http_client(self) -> None: + with pytest.raises(TypeError, match="Invalid `http_client` arg"): + with httpx.Client() as http_client: + AsyncBrapi( + base_url=base_url, + api_key=api_key, + _strict_response_validation=True, + http_client=cast(Any, http_client), + ) + + def test_default_headers_option(self) -> None: + client = AsyncBrapi( + base_url=base_url, api_key=api_key, _strict_response_validation=True, default_headers={"X-Foo": "bar"} + ) + request = client._build_request(FinalRequestOptions(method="get", url="/foo")) + assert request.headers.get("x-foo") == "bar" + assert request.headers.get("x-stainless-lang") == "python" + + client2 = AsyncBrapi( + base_url=base_url, + api_key=api_key, + _strict_response_validation=True, + default_headers={ + "X-Foo": "stainless", + "X-Stainless-Lang": "my-overriding-header", + }, + ) + request = 
client2._build_request(FinalRequestOptions(method="get", url="/foo")) + assert request.headers.get("x-foo") == "stainless" + assert request.headers.get("x-stainless-lang") == "my-overriding-header" + + def test_validate_headers(self) -> None: + client = AsyncBrapi(base_url=base_url, api_key=api_key, _strict_response_validation=True) + request = client._build_request(FinalRequestOptions(method="get", url="/foo")) + assert request.headers.get("Authorization") == f"Bearer {api_key}" + + with pytest.raises(BrapiError): + with update_env(**{"BRAPI_API_KEY": Omit()}): + client2 = AsyncBrapi(base_url=base_url, api_key=None, _strict_response_validation=True) + _ = client2 + + def test_default_query_option(self) -> None: + client = AsyncBrapi( + base_url=base_url, api_key=api_key, _strict_response_validation=True, default_query={"query_param": "bar"} + ) + request = client._build_request(FinalRequestOptions(method="get", url="/foo")) + url = httpx.URL(request.url) + assert dict(url.params) == {"query_param": "bar"} + + request = client._build_request( + FinalRequestOptions( + method="get", + url="/foo", + params={"foo": "baz", "query_param": "overridden"}, + ) + ) + url = httpx.URL(request.url) + assert dict(url.params) == {"foo": "baz", "query_param": "overridden"} + + def test_request_extra_json(self) -> None: + request = self.client._build_request( + FinalRequestOptions( + method="post", + url="/foo", + json_data={"foo": "bar"}, + extra_json={"baz": False}, + ), + ) + data = json.loads(request.content.decode("utf-8")) + assert data == {"foo": "bar", "baz": False} + + request = self.client._build_request( + FinalRequestOptions( + method="post", + url="/foo", + extra_json={"baz": False}, + ), + ) + data = json.loads(request.content.decode("utf-8")) + assert data == {"baz": False} + + # `extra_json` takes priority over `json_data` when keys clash + request = self.client._build_request( + FinalRequestOptions( + method="post", + url="/foo", + json_data={"foo": "bar", "baz": True}, + extra_json={"baz": None}, + ), + ) + data = json.loads(request.content.decode("utf-8")) + assert data == {"foo": "bar", "baz": None} + + def test_request_extra_headers(self) -> None: + request = self.client._build_request( + FinalRequestOptions( + method="post", + url="/foo", + **make_request_options(extra_headers={"X-Foo": "Foo"}), + ), + ) + assert request.headers.get("X-Foo") == "Foo" + + # `extra_headers` takes priority over `default_headers` when keys clash + request = self.client.with_options(default_headers={"X-Bar": "true"})._build_request( + FinalRequestOptions( + method="post", + url="/foo", + **make_request_options( + extra_headers={"X-Bar": "false"}, + ), + ), + ) + assert request.headers.get("X-Bar") == "false" + + def test_request_extra_query(self) -> None: + request = self.client._build_request( + FinalRequestOptions( + method="post", + url="/foo", + **make_request_options( + extra_query={"my_query_param": "Foo"}, + ), + ), + ) + params = dict(request.url.params) + assert params == {"my_query_param": "Foo"} + + # if both `query` and `extra_query` are given, they are merged + request = self.client._build_request( + FinalRequestOptions( + method="post", + url="/foo", + **make_request_options( + query={"bar": "1"}, + extra_query={"foo": "2"}, + ), + ), + ) + params = dict(request.url.params) + assert params == {"bar": "1", "foo": "2"} + + # `extra_query` takes priority over `query` when keys clash + request = self.client._build_request( + FinalRequestOptions( + method="post", + url="/foo", + 
**make_request_options( + query={"foo": "1"}, + extra_query={"foo": "2"}, + ), + ), + ) + params = dict(request.url.params) + assert params == {"foo": "2"} + + def test_multipart_repeating_array(self, async_client: AsyncBrapi) -> None: + request = async_client._build_request( + FinalRequestOptions.construct( + method="post", + url="/foo", + headers={"Content-Type": "multipart/form-data; boundary=6b7ba517decee4a450543ea6ae821c82"}, + json_data={"array": ["foo", "bar"]}, + files=[("foo.txt", b"hello world")], + ) + ) + + assert request.read().split(b"\r\n") == [ + b"--6b7ba517decee4a450543ea6ae821c82", + b'Content-Disposition: form-data; name="array[]"', + b"", + b"foo", + b"--6b7ba517decee4a450543ea6ae821c82", + b'Content-Disposition: form-data; name="array[]"', + b"", + b"bar", + b"--6b7ba517decee4a450543ea6ae821c82", + b'Content-Disposition: form-data; name="foo.txt"; filename="upload"', + b"Content-Type: application/octet-stream", + b"", + b"hello world", + b"--6b7ba517decee4a450543ea6ae821c82--", + b"", + ] + + @pytest.mark.respx(base_url=base_url) + async def test_basic_union_response(self, respx_mock: MockRouter) -> None: + class Model1(BaseModel): + name: str + + class Model2(BaseModel): + foo: str + + respx_mock.get("/foo").mock(return_value=httpx.Response(200, json={"foo": "bar"})) + + response = await self.client.get("/foo", cast_to=cast(Any, Union[Model1, Model2])) + assert isinstance(response, Model2) + assert response.foo == "bar" + + @pytest.mark.respx(base_url=base_url) + async def test_union_response_different_types(self, respx_mock: MockRouter) -> None: + """Union of objects with the same field name using a different type""" + + class Model1(BaseModel): + foo: int + + class Model2(BaseModel): + foo: str + + respx_mock.get("/foo").mock(return_value=httpx.Response(200, json={"foo": "bar"})) + + response = await self.client.get("/foo", cast_to=cast(Any, Union[Model1, Model2])) + assert isinstance(response, Model2) + assert response.foo == "bar" + + respx_mock.get("/foo").mock(return_value=httpx.Response(200, json={"foo": 1})) + + response = await self.client.get("/foo", cast_to=cast(Any, Union[Model1, Model2])) + assert isinstance(response, Model1) + assert response.foo == 1 + + @pytest.mark.respx(base_url=base_url) + async def test_non_application_json_content_type_for_json_data(self, respx_mock: MockRouter) -> None: + """ + Response that sets Content-Type to something other than application/json but returns json data + """ + + class Model(BaseModel): + foo: int + + respx_mock.get("/foo").mock( + return_value=httpx.Response( + 200, + content=json.dumps({"foo": 2}), + headers={"Content-Type": "application/text"}, + ) + ) + + response = await self.client.get("/foo", cast_to=Model) + assert isinstance(response, Model) + assert response.foo == 2 + + def test_base_url_setter(self) -> None: + client = AsyncBrapi(base_url="https://example.com/from_init", api_key=api_key, _strict_response_validation=True) + assert client.base_url == "https://example.com/from_init/" + + client.base_url = "https://example.com/from_setter" # type: ignore[assignment] + + assert client.base_url == "https://example.com/from_setter/" + + def test_base_url_env(self) -> None: + with update_env(BRAPI_BASE_URL="http://localhost:5000/from/env"): + client = AsyncBrapi(api_key=api_key, _strict_response_validation=True) + assert client.base_url == "http://localhost:5000/from/env/" + + # explicit environment arg requires explicitness + with update_env(BRAPI_BASE_URL="http://localhost:5000/from/env"): + with 
pytest.raises(ValueError, match=r"you must pass base_url=None"): + AsyncBrapi(api_key=api_key, _strict_response_validation=True, environment="production") + + client = AsyncBrapi( + base_url=None, api_key=api_key, _strict_response_validation=True, environment="production" + ) + assert str(client.base_url).startswith("https://brapi.dev") + + @pytest.mark.parametrize( + "client", + [ + AsyncBrapi( + base_url="http://localhost:5000/custom/path/", api_key=api_key, _strict_response_validation=True + ), + AsyncBrapi( + base_url="http://localhost:5000/custom/path/", + api_key=api_key, + _strict_response_validation=True, + http_client=httpx.AsyncClient(), + ), + ], + ids=["standard", "custom http client"], + ) + def test_base_url_trailing_slash(self, client: AsyncBrapi) -> None: + request = client._build_request( + FinalRequestOptions( + method="post", + url="/foo", + json_data={"foo": "bar"}, + ), + ) + assert request.url == "http://localhost:5000/custom/path/foo" + + @pytest.mark.parametrize( + "client", + [ + AsyncBrapi( + base_url="http://localhost:5000/custom/path/", api_key=api_key, _strict_response_validation=True + ), + AsyncBrapi( + base_url="http://localhost:5000/custom/path/", + api_key=api_key, + _strict_response_validation=True, + http_client=httpx.AsyncClient(), + ), + ], + ids=["standard", "custom http client"], + ) + def test_base_url_no_trailing_slash(self, client: AsyncBrapi) -> None: + request = client._build_request( + FinalRequestOptions( + method="post", + url="/foo", + json_data={"foo": "bar"}, + ), + ) + assert request.url == "http://localhost:5000/custom/path/foo" + + @pytest.mark.parametrize( + "client", + [ + AsyncBrapi( + base_url="http://localhost:5000/custom/path/", api_key=api_key, _strict_response_validation=True + ), + AsyncBrapi( + base_url="http://localhost:5000/custom/path/", + api_key=api_key, + _strict_response_validation=True, + http_client=httpx.AsyncClient(), + ), + ], + ids=["standard", "custom http client"], + ) + def test_absolute_request_url(self, client: AsyncBrapi) -> None: + request = client._build_request( + FinalRequestOptions( + method="post", + url="https://myapi.com/foo", + json_data={"foo": "bar"}, + ), + ) + assert request.url == "https://myapi.com/foo" + + async def test_copied_client_does_not_close_http(self) -> None: + client = AsyncBrapi(base_url=base_url, api_key=api_key, _strict_response_validation=True) + assert not client.is_closed() + + copied = client.copy() + assert copied is not client + + del copied + + await asyncio.sleep(0.2) + assert not client.is_closed() + + async def test_client_context_manager(self) -> None: + client = AsyncBrapi(base_url=base_url, api_key=api_key, _strict_response_validation=True) + async with client as c2: + assert c2 is client + assert not c2.is_closed() + assert not client.is_closed() + assert client.is_closed() + + @pytest.mark.respx(base_url=base_url) + @pytest.mark.asyncio + async def test_client_response_validation_error(self, respx_mock: MockRouter) -> None: + class Model(BaseModel): + foo: str + + respx_mock.get("/foo").mock(return_value=httpx.Response(200, json={"foo": {"invalid": True}})) + + with pytest.raises(APIResponseValidationError) as exc: + await self.client.get("/foo", cast_to=Model) + + assert isinstance(exc.value.__cause__, ValidationError) + + async def test_client_max_retries_validation(self) -> None: + with pytest.raises(TypeError, match=r"max_retries cannot be None"): + AsyncBrapi( + base_url=base_url, api_key=api_key, _strict_response_validation=True, max_retries=cast(Any, None) 
+ ) + + @pytest.mark.respx(base_url=base_url) + @pytest.mark.asyncio + async def test_received_text_for_expected_json(self, respx_mock: MockRouter) -> None: + class Model(BaseModel): + name: str + + respx_mock.get("/foo").mock(return_value=httpx.Response(200, text="my-custom-format")) + + strict_client = AsyncBrapi(base_url=base_url, api_key=api_key, _strict_response_validation=True) + + with pytest.raises(APIResponseValidationError): + await strict_client.get("/foo", cast_to=Model) + + client = AsyncBrapi(base_url=base_url, api_key=api_key, _strict_response_validation=False) + + response = await client.get("/foo", cast_to=Model) + assert isinstance(response, str) # type: ignore[unreachable] + + @pytest.mark.parametrize( + "remaining_retries,retry_after,timeout", + [ + [3, "20", 20], + [3, "0", 0.5], + [3, "-10", 0.5], + [3, "60", 60], + [3, "61", 0.5], + [3, "Fri, 29 Sep 2023 16:26:57 GMT", 20], + [3, "Fri, 29 Sep 2023 16:26:37 GMT", 0.5], + [3, "Fri, 29 Sep 2023 16:26:27 GMT", 0.5], + [3, "Fri, 29 Sep 2023 16:27:37 GMT", 60], + [3, "Fri, 29 Sep 2023 16:27:38 GMT", 0.5], + [3, "99999999999999999999999999999999999", 0.5], + [3, "Zun, 29 Sep 2023 16:26:27 GMT", 0.5], + [3, "", 0.5], + [2, "", 0.5 * 2.0], + [1, "", 0.5 * 4.0], + [-1100, "", 8], # test large number potentially overflowing + ], + ) + @mock.patch("time.time", mock.MagicMock(return_value=1696004797)) + @pytest.mark.asyncio + async def test_parse_retry_after_header(self, remaining_retries: int, retry_after: str, timeout: float) -> None: + client = AsyncBrapi(base_url=base_url, api_key=api_key, _strict_response_validation=True) + + headers = httpx.Headers({"retry-after": retry_after}) + options = FinalRequestOptions(method="get", url="/foo", max_retries=3) + calculated = client._calculate_retry_timeout(remaining_retries, options, headers) + assert calculated == pytest.approx(timeout, 0.5 * 0.875) # pyright: ignore[reportUnknownMemberType] + + @mock.patch("brapi._base_client.BaseClient._calculate_retry_timeout", _low_retry_timeout) + @pytest.mark.respx(base_url=base_url) + async def test_retrying_timeout_errors_doesnt_leak(self, respx_mock: MockRouter, async_client: AsyncBrapi) -> None: + respx_mock.get("/api/quote/PETR4,MGLU3").mock(side_effect=httpx.TimeoutException("Test timeout error")) + + with pytest.raises(APITimeoutError): + await async_client.quote.with_streaming_response.retrieve(tickers="PETR4,MGLU3").__aenter__() + + assert _get_open_connections(self.client) == 0 + + @mock.patch("brapi._base_client.BaseClient._calculate_retry_timeout", _low_retry_timeout) + @pytest.mark.respx(base_url=base_url) + async def test_retrying_status_errors_doesnt_leak(self, respx_mock: MockRouter, async_client: AsyncBrapi) -> None: + respx_mock.get("/api/quote/PETR4,MGLU3").mock(return_value=httpx.Response(500)) + + with pytest.raises(APIStatusError): + await async_client.quote.with_streaming_response.retrieve(tickers="PETR4,MGLU3").__aenter__() + assert _get_open_connections(self.client) == 0 + + @pytest.mark.parametrize("failures_before_success", [0, 2, 4]) + @mock.patch("brapi._base_client.BaseClient._calculate_retry_timeout", _low_retry_timeout) + @pytest.mark.respx(base_url=base_url) + @pytest.mark.asyncio + @pytest.mark.parametrize("failure_mode", ["status", "exception"]) + async def test_retries_taken( + self, + async_client: AsyncBrapi, + failures_before_success: int, + failure_mode: Literal["status", "exception"], + respx_mock: MockRouter, + ) -> None: + client = async_client.with_options(max_retries=4) + + nb_retries = 0 + + def 
retry_handler(_request: httpx.Request) -> httpx.Response: + nonlocal nb_retries + if nb_retries < failures_before_success: + nb_retries += 1 + if failure_mode == "exception": + raise RuntimeError("oops") + return httpx.Response(500) + return httpx.Response(200) + + respx_mock.get("/api/quote/PETR4,MGLU3").mock(side_effect=retry_handler) + + response = await client.quote.with_raw_response.retrieve(tickers="PETR4,MGLU3") + + assert response.retries_taken == failures_before_success + assert int(response.http_request.headers.get("x-stainless-retry-count")) == failures_before_success + + @pytest.mark.parametrize("failures_before_success", [0, 2, 4]) + @mock.patch("brapi._base_client.BaseClient._calculate_retry_timeout", _low_retry_timeout) + @pytest.mark.respx(base_url=base_url) + @pytest.mark.asyncio + async def test_omit_retry_count_header( + self, async_client: AsyncBrapi, failures_before_success: int, respx_mock: MockRouter + ) -> None: + client = async_client.with_options(max_retries=4) + + nb_retries = 0 + + def retry_handler(_request: httpx.Request) -> httpx.Response: + nonlocal nb_retries + if nb_retries < failures_before_success: + nb_retries += 1 + return httpx.Response(500) + return httpx.Response(200) + + respx_mock.get("/api/quote/PETR4,MGLU3").mock(side_effect=retry_handler) + + response = await client.quote.with_raw_response.retrieve( + tickers="PETR4,MGLU3", extra_headers={"x-stainless-retry-count": Omit()} + ) + + assert len(response.http_request.headers.get_list("x-stainless-retry-count")) == 0 + + @pytest.mark.parametrize("failures_before_success", [0, 2, 4]) + @mock.patch("brapi._base_client.BaseClient._calculate_retry_timeout", _low_retry_timeout) + @pytest.mark.respx(base_url=base_url) + @pytest.mark.asyncio + async def test_overwrite_retry_count_header( + self, async_client: AsyncBrapi, failures_before_success: int, respx_mock: MockRouter + ) -> None: + client = async_client.with_options(max_retries=4) + + nb_retries = 0 + + def retry_handler(_request: httpx.Request) -> httpx.Response: + nonlocal nb_retries + if nb_retries < failures_before_success: + nb_retries += 1 + return httpx.Response(500) + return httpx.Response(200) + + respx_mock.get("/api/quote/PETR4,MGLU3").mock(side_effect=retry_handler) + + response = await client.quote.with_raw_response.retrieve( + tickers="PETR4,MGLU3", extra_headers={"x-stainless-retry-count": "42"} + ) + + assert response.http_request.headers.get("x-stainless-retry-count") == "42" + + async def test_get_platform(self) -> None: + platform = await asyncify(get_platform)() + assert isinstance(platform, (str, OtherPlatform)) + + async def test_proxy_environment_variables(self, monkeypatch: pytest.MonkeyPatch) -> None: + # Test that the proxy environment variables are set correctly + monkeypatch.setenv("HTTPS_PROXY", "https://example.org") + + client = DefaultAsyncHttpxClient() + + mounts = tuple(client._mounts.items()) + assert len(mounts) == 1 + assert mounts[0][0].pattern == "https://" + + @pytest.mark.filterwarnings("ignore:.*deprecated.*:DeprecationWarning") + async def test_default_client_creation(self) -> None: + # Ensure that the client can be initialized without any exceptions + DefaultAsyncHttpxClient( + verify=True, + cert=None, + trust_env=True, + http1=True, + http2=False, + limits=httpx.Limits(max_connections=100, max_keepalive_connections=20), + ) + + @pytest.mark.respx(base_url=base_url) + async def test_follow_redirects(self, respx_mock: MockRouter) -> None: + # Test that the default follow_redirects=True allows following 
redirects + respx_mock.post("/redirect").mock( + return_value=httpx.Response(302, headers={"Location": f"{base_url}/redirected"}) + ) + respx_mock.get("/redirected").mock(return_value=httpx.Response(200, json={"status": "ok"})) + + response = await self.client.post("/redirect", body={"key": "value"}, cast_to=httpx.Response) + assert response.status_code == 200 + assert response.json() == {"status": "ok"} + + @pytest.mark.respx(base_url=base_url) + async def test_follow_redirects_disabled(self, respx_mock: MockRouter) -> None: + # Test that follow_redirects=False prevents following redirects + respx_mock.post("/redirect").mock( + return_value=httpx.Response(302, headers={"Location": f"{base_url}/redirected"}) + ) + + with pytest.raises(APIStatusError) as exc_info: + await self.client.post( + "/redirect", body={"key": "value"}, options={"follow_redirects": False}, cast_to=httpx.Response + ) + + assert exc_info.value.response.status_code == 302 + assert exc_info.value.response.headers["Location"] == f"{base_url}/redirected" diff --git a/tests/test_deepcopy.py b/tests/test_deepcopy.py new file mode 100644 index 0000000..f5639fb --- /dev/null +++ b/tests/test_deepcopy.py @@ -0,0 +1,58 @@ +from brapi._utils import deepcopy_minimal + + +def assert_different_identities(obj1: object, obj2: object) -> None: + assert obj1 == obj2 + assert id(obj1) != id(obj2) + + +def test_simple_dict() -> None: + obj1 = {"foo": "bar"} + obj2 = deepcopy_minimal(obj1) + assert_different_identities(obj1, obj2) + + +def test_nested_dict() -> None: + obj1 = {"foo": {"bar": True}} + obj2 = deepcopy_minimal(obj1) + assert_different_identities(obj1, obj2) + assert_different_identities(obj1["foo"], obj2["foo"]) + + +def test_complex_nested_dict() -> None: + obj1 = {"foo": {"bar": [{"hello": "world"}]}} + obj2 = deepcopy_minimal(obj1) + assert_different_identities(obj1, obj2) + assert_different_identities(obj1["foo"], obj2["foo"]) + assert_different_identities(obj1["foo"]["bar"], obj2["foo"]["bar"]) + assert_different_identities(obj1["foo"]["bar"][0], obj2["foo"]["bar"][0]) + + +def test_simple_list() -> None: + obj1 = ["a", "b", "c"] + obj2 = deepcopy_minimal(obj1) + assert_different_identities(obj1, obj2) + + +def test_nested_list() -> None: + obj1 = ["a", [1, 2, 3]] + obj2 = deepcopy_minimal(obj1) + assert_different_identities(obj1, obj2) + assert_different_identities(obj1[1], obj2[1]) + + +class MyObject: ... 
+ + +def test_ignores_other_types() -> None: + # custom classes + my_obj = MyObject() + obj1 = {"foo": my_obj} + obj2 = deepcopy_minimal(obj1) + assert_different_identities(obj1, obj2) + assert obj1["foo"] is my_obj + + # tuples + obj3 = ("a", "b") + obj4 = deepcopy_minimal(obj3) + assert obj3 is obj4 diff --git a/tests/test_extract_files.py b/tests/test_extract_files.py new file mode 100644 index 0000000..9b6f362 --- /dev/null +++ b/tests/test_extract_files.py @@ -0,0 +1,64 @@ +from __future__ import annotations + +from typing import Sequence + +import pytest + +from brapi._types import FileTypes +from brapi._utils import extract_files + + +def test_removes_files_from_input() -> None: + query = {"foo": "bar"} + assert extract_files(query, paths=[]) == [] + assert query == {"foo": "bar"} + + query2 = {"foo": b"Bar", "hello": "world"} + assert extract_files(query2, paths=[["foo"]]) == [("foo", b"Bar")] + assert query2 == {"hello": "world"} + + query3 = {"foo": {"foo": {"bar": b"Bar"}}, "hello": "world"} + assert extract_files(query3, paths=[["foo", "foo", "bar"]]) == [("foo[foo][bar]", b"Bar")] + assert query3 == {"foo": {"foo": {}}, "hello": "world"} + + query4 = {"foo": {"bar": b"Bar", "baz": "foo"}, "hello": "world"} + assert extract_files(query4, paths=[["foo", "bar"]]) == [("foo[bar]", b"Bar")] + assert query4 == {"hello": "world", "foo": {"baz": "foo"}} + + +def test_multiple_files() -> None: + query = {"documents": [{"file": b"My first file"}, {"file": b"My second file"}]} + assert extract_files(query, paths=[["documents", "", "file"]]) == [ + ("documents[][file]", b"My first file"), + ("documents[][file]", b"My second file"), + ] + assert query == {"documents": [{}, {}]} + + +@pytest.mark.parametrize( + "query,paths,expected", + [ + [ + {"foo": {"bar": "baz"}}, + [["foo", "", "bar"]], + [], + ], + [ + {"foo": ["bar", "baz"]}, + [["foo", "bar"]], + [], + ], + [ + {"foo": {"bar": "baz"}}, + [["foo", "foo"]], + [], + ], + ], + ids=["dict expecting array", "array expecting dict", "unknown keys"], +) +def test_ignores_incorrect_paths( + query: dict[str, object], + paths: Sequence[Sequence[str]], + expected: list[tuple[str, FileTypes]], +) -> None: + assert extract_files(query, paths=paths) == expected diff --git a/tests/test_files.py b/tests/test_files.py new file mode 100644 index 0000000..bdb3673 --- /dev/null +++ b/tests/test_files.py @@ -0,0 +1,51 @@ +from pathlib import Path + +import anyio +import pytest +from dirty_equals import IsDict, IsList, IsBytes, IsTuple + +from brapi._files import to_httpx_files, async_to_httpx_files + +readme_path = Path(__file__).parent.parent.joinpath("README.md") + + +def test_pathlib_includes_file_name() -> None: + result = to_httpx_files({"file": readme_path}) + print(result) + assert result == IsDict({"file": IsTuple("README.md", IsBytes())}) + + +def test_tuple_input() -> None: + result = to_httpx_files([("file", readme_path)]) + print(result) + assert result == IsList(IsTuple("file", IsTuple("README.md", IsBytes()))) + + +@pytest.mark.asyncio +async def test_async_pathlib_includes_file_name() -> None: + result = await async_to_httpx_files({"file": readme_path}) + print(result) + assert result == IsDict({"file": IsTuple("README.md", IsBytes())}) + + +@pytest.mark.asyncio +async def test_async_supports_anyio_path() -> None: + result = await async_to_httpx_files({"file": anyio.Path(readme_path)}) + print(result) + assert result == IsDict({"file": IsTuple("README.md", IsBytes())}) + + +@pytest.mark.asyncio +async def test_async_tuple_input() -> None: 
+ result = await async_to_httpx_files([("file", readme_path)]) + print(result) + assert result == IsList(IsTuple("file", IsTuple("README.md", IsBytes()))) + + +def test_string_not_allowed() -> None: + with pytest.raises(TypeError, match="Expected file types input to be a FileContent type or to be a tuple"): + to_httpx_files( + { + "file": "foo", # type: ignore + } + ) diff --git a/tests/test_models.py b/tests/test_models.py new file mode 100644 index 0000000..cb150da --- /dev/null +++ b/tests/test_models.py @@ -0,0 +1,963 @@ +import json +from typing import TYPE_CHECKING, Any, Dict, List, Union, Optional, cast +from datetime import datetime, timezone +from typing_extensions import Literal, Annotated, TypeAliasType + +import pytest +import pydantic +from pydantic import Field + +from brapi._utils import PropertyInfo +from brapi._compat import PYDANTIC_V1, parse_obj, model_dump, model_json +from brapi._models import BaseModel, construct_type + + +class BasicModel(BaseModel): + foo: str + + +@pytest.mark.parametrize("value", ["hello", 1], ids=["correct type", "mismatched"]) +def test_basic(value: object) -> None: + m = BasicModel.construct(foo=value) + assert m.foo == value + + +def test_directly_nested_model() -> None: + class NestedModel(BaseModel): + nested: BasicModel + + m = NestedModel.construct(nested={"foo": "Foo!"}) + assert m.nested.foo == "Foo!" + + # mismatched types + m = NestedModel.construct(nested="hello!") + assert cast(Any, m.nested) == "hello!" + + +def test_optional_nested_model() -> None: + class NestedModel(BaseModel): + nested: Optional[BasicModel] + + m1 = NestedModel.construct(nested=None) + assert m1.nested is None + + m2 = NestedModel.construct(nested={"foo": "bar"}) + assert m2.nested is not None + assert m2.nested.foo == "bar" + + # mismatched types + m3 = NestedModel.construct(nested={"foo"}) + assert isinstance(cast(Any, m3.nested), set) + assert cast(Any, m3.nested) == {"foo"} + + +def test_list_nested_model() -> None: + class NestedModel(BaseModel): + nested: List[BasicModel] + + m = NestedModel.construct(nested=[{"foo": "bar"}, {"foo": "2"}]) + assert m.nested is not None + assert isinstance(m.nested, list) + assert len(m.nested) == 2 + assert m.nested[0].foo == "bar" + assert m.nested[1].foo == "2" + + # mismatched types + m = NestedModel.construct(nested=True) + assert cast(Any, m.nested) is True + + m = NestedModel.construct(nested=[False]) + assert cast(Any, m.nested) == [False] + + +def test_optional_list_nested_model() -> None: + class NestedModel(BaseModel): + nested: Optional[List[BasicModel]] + + m1 = NestedModel.construct(nested=[{"foo": "bar"}, {"foo": "2"}]) + assert m1.nested is not None + assert isinstance(m1.nested, list) + assert len(m1.nested) == 2 + assert m1.nested[0].foo == "bar" + assert m1.nested[1].foo == "2" + + m2 = NestedModel.construct(nested=None) + assert m2.nested is None + + # mismatched types + m3 = NestedModel.construct(nested={1}) + assert cast(Any, m3.nested) == {1} + + m4 = NestedModel.construct(nested=[False]) + assert cast(Any, m4.nested) == [False] + + +def test_list_optional_items_nested_model() -> None: + class NestedModel(BaseModel): + nested: List[Optional[BasicModel]] + + m = NestedModel.construct(nested=[None, {"foo": "bar"}]) + assert m.nested is not None + assert isinstance(m.nested, list) + assert len(m.nested) == 2 + assert m.nested[0] is None + assert m.nested[1] is not None + assert m.nested[1].foo == "bar" + + # mismatched types + m3 = NestedModel.construct(nested="foo") + assert cast(Any, m3.nested) == 
"foo" + + m4 = NestedModel.construct(nested=[False]) + assert cast(Any, m4.nested) == [False] + + +def test_list_mismatched_type() -> None: + class NestedModel(BaseModel): + nested: List[str] + + m = NestedModel.construct(nested=False) + assert cast(Any, m.nested) is False + + +def test_raw_dictionary() -> None: + class NestedModel(BaseModel): + nested: Dict[str, str] + + m = NestedModel.construct(nested={"hello": "world"}) + assert m.nested == {"hello": "world"} + + # mismatched types + m = NestedModel.construct(nested=False) + assert cast(Any, m.nested) is False + + +def test_nested_dictionary_model() -> None: + class NestedModel(BaseModel): + nested: Dict[str, BasicModel] + + m = NestedModel.construct(nested={"hello": {"foo": "bar"}}) + assert isinstance(m.nested, dict) + assert m.nested["hello"].foo == "bar" + + # mismatched types + m = NestedModel.construct(nested={"hello": False}) + assert cast(Any, m.nested["hello"]) is False + + +def test_unknown_fields() -> None: + m1 = BasicModel.construct(foo="foo", unknown=1) + assert m1.foo == "foo" + assert cast(Any, m1).unknown == 1 + + m2 = BasicModel.construct(foo="foo", unknown={"foo_bar": True}) + assert m2.foo == "foo" + assert cast(Any, m2).unknown == {"foo_bar": True} + + assert model_dump(m2) == {"foo": "foo", "unknown": {"foo_bar": True}} + + +def test_strict_validation_unknown_fields() -> None: + class Model(BaseModel): + foo: str + + model = parse_obj(Model, dict(foo="hello!", user="Robert")) + assert model.foo == "hello!" + assert cast(Any, model).user == "Robert" + + assert model_dump(model) == {"foo": "hello!", "user": "Robert"} + + +def test_aliases() -> None: + class Model(BaseModel): + my_field: int = Field(alias="myField") + + m = Model.construct(myField=1) + assert m.my_field == 1 + + # mismatched types + m = Model.construct(myField={"hello": False}) + assert cast(Any, m.my_field) == {"hello": False} + + +def test_repr() -> None: + model = BasicModel(foo="bar") + assert str(model) == "BasicModel(foo='bar')" + assert repr(model) == "BasicModel(foo='bar')" + + +def test_repr_nested_model() -> None: + class Child(BaseModel): + name: str + age: int + + class Parent(BaseModel): + name: str + child: Child + + model = Parent(name="Robert", child=Child(name="Foo", age=5)) + assert str(model) == "Parent(name='Robert', child=Child(name='Foo', age=5))" + assert repr(model) == "Parent(name='Robert', child=Child(name='Foo', age=5))" + + +def test_optional_list() -> None: + class Submodel(BaseModel): + name: str + + class Model(BaseModel): + items: Optional[List[Submodel]] + + m = Model.construct(items=None) + assert m.items is None + + m = Model.construct(items=[]) + assert m.items == [] + + m = Model.construct(items=[{"name": "Robert"}]) + assert m.items is not None + assert len(m.items) == 1 + assert m.items[0].name == "Robert" + + +def test_nested_union_of_models() -> None: + class Submodel1(BaseModel): + bar: bool + + class Submodel2(BaseModel): + thing: str + + class Model(BaseModel): + foo: Union[Submodel1, Submodel2] + + m = Model.construct(foo={"thing": "hello"}) + assert isinstance(m.foo, Submodel2) + assert m.foo.thing == "hello" + + +def test_nested_union_of_mixed_types() -> None: + class Submodel1(BaseModel): + bar: bool + + class Model(BaseModel): + foo: Union[Submodel1, Literal[True], Literal["CARD_HOLDER"]] + + m = Model.construct(foo=True) + assert m.foo is True + + m = Model.construct(foo="CARD_HOLDER") + assert m.foo == "CARD_HOLDER" + + m = Model.construct(foo={"bar": False}) + assert isinstance(m.foo, Submodel1) + 
assert m.foo.bar is False + + +def test_nested_union_multiple_variants() -> None: + class Submodel1(BaseModel): + bar: bool + + class Submodel2(BaseModel): + thing: str + + class Submodel3(BaseModel): + foo: int + + class Model(BaseModel): + foo: Union[Submodel1, Submodel2, None, Submodel3] + + m = Model.construct(foo={"thing": "hello"}) + assert isinstance(m.foo, Submodel2) + assert m.foo.thing == "hello" + + m = Model.construct(foo=None) + assert m.foo is None + + m = Model.construct() + assert m.foo is None + + m = Model.construct(foo={"foo": "1"}) + assert isinstance(m.foo, Submodel3) + assert m.foo.foo == 1 + + +def test_nested_union_invalid_data() -> None: + class Submodel1(BaseModel): + level: int + + class Submodel2(BaseModel): + name: str + + class Model(BaseModel): + foo: Union[Submodel1, Submodel2] + + m = Model.construct(foo=True) + assert cast(bool, m.foo) is True + + m = Model.construct(foo={"name": 3}) + if PYDANTIC_V1: + assert isinstance(m.foo, Submodel2) + assert m.foo.name == "3" + else: + assert isinstance(m.foo, Submodel1) + assert m.foo.name == 3 # type: ignore + + +def test_list_of_unions() -> None: + class Submodel1(BaseModel): + level: int + + class Submodel2(BaseModel): + name: str + + class Model(BaseModel): + items: List[Union[Submodel1, Submodel2]] + + m = Model.construct(items=[{"level": 1}, {"name": "Robert"}]) + assert len(m.items) == 2 + assert isinstance(m.items[0], Submodel1) + assert m.items[0].level == 1 + assert isinstance(m.items[1], Submodel2) + assert m.items[1].name == "Robert" + + m = Model.construct(items=[{"level": -1}, 156]) + assert len(m.items) == 2 + assert isinstance(m.items[0], Submodel1) + assert m.items[0].level == -1 + assert cast(Any, m.items[1]) == 156 + + +def test_union_of_lists() -> None: + class SubModel1(BaseModel): + level: int + + class SubModel2(BaseModel): + name: str + + class Model(BaseModel): + items: Union[List[SubModel1], List[SubModel2]] + + # with one valid entry + m = Model.construct(items=[{"name": "Robert"}]) + assert len(m.items) == 1 + assert isinstance(m.items[0], SubModel2) + assert m.items[0].name == "Robert" + + # with two entries pointing to different types + m = Model.construct(items=[{"level": 1}, {"name": "Robert"}]) + assert len(m.items) == 2 + assert isinstance(m.items[0], SubModel1) + assert m.items[0].level == 1 + assert isinstance(m.items[1], SubModel1) + assert cast(Any, m.items[1]).name == "Robert" + + # with two entries pointing to *completely* different types + m = Model.construct(items=[{"level": -1}, 156]) + assert len(m.items) == 2 + assert isinstance(m.items[0], SubModel1) + assert m.items[0].level == -1 + assert cast(Any, m.items[1]) == 156 + + +def test_dict_of_union() -> None: + class SubModel1(BaseModel): + name: str + + class SubModel2(BaseModel): + foo: str + + class Model(BaseModel): + data: Dict[str, Union[SubModel1, SubModel2]] + + m = Model.construct(data={"hello": {"name": "there"}, "foo": {"foo": "bar"}}) + assert len(list(m.data.keys())) == 2 + assert isinstance(m.data["hello"], SubModel1) + assert m.data["hello"].name == "there" + assert isinstance(m.data["foo"], SubModel2) + assert m.data["foo"].foo == "bar" + + # TODO: test mismatched type + + +def test_double_nested_union() -> None: + class SubModel1(BaseModel): + name: str + + class SubModel2(BaseModel): + bar: str + + class Model(BaseModel): + data: Dict[str, List[Union[SubModel1, SubModel2]]] + + m = Model.construct(data={"foo": [{"bar": "baz"}, {"name": "Robert"}]}) + assert len(m.data["foo"]) == 2 + + entry1 = 
m.data["foo"][0] + assert isinstance(entry1, SubModel2) + assert entry1.bar == "baz" + + entry2 = m.data["foo"][1] + assert isinstance(entry2, SubModel1) + assert entry2.name == "Robert" + + # TODO: test mismatched type + + +def test_union_of_dict() -> None: + class SubModel1(BaseModel): + name: str + + class SubModel2(BaseModel): + foo: str + + class Model(BaseModel): + data: Union[Dict[str, SubModel1], Dict[str, SubModel2]] + + m = Model.construct(data={"hello": {"name": "there"}, "foo": {"foo": "bar"}}) + assert len(list(m.data.keys())) == 2 + assert isinstance(m.data["hello"], SubModel1) + assert m.data["hello"].name == "there" + assert isinstance(m.data["foo"], SubModel1) + assert cast(Any, m.data["foo"]).foo == "bar" + + +def test_iso8601_datetime() -> None: + class Model(BaseModel): + created_at: datetime + + expected = datetime(2019, 12, 27, 18, 11, 19, 117000, tzinfo=timezone.utc) + + if PYDANTIC_V1: + expected_json = '{"created_at": "2019-12-27T18:11:19.117000+00:00"}' + else: + expected_json = '{"created_at":"2019-12-27T18:11:19.117000Z"}' + + model = Model.construct(created_at="2019-12-27T18:11:19.117Z") + assert model.created_at == expected + assert model_json(model) == expected_json + + model = parse_obj(Model, dict(created_at="2019-12-27T18:11:19.117Z")) + assert model.created_at == expected + assert model_json(model) == expected_json + + +def test_does_not_coerce_int() -> None: + class Model(BaseModel): + bar: int + + assert Model.construct(bar=1).bar == 1 + assert Model.construct(bar=10.9).bar == 10.9 + assert Model.construct(bar="19").bar == "19" # type: ignore[comparison-overlap] + assert Model.construct(bar=False).bar is False + + +def test_int_to_float_safe_conversion() -> None: + class Model(BaseModel): + float_field: float + + m = Model.construct(float_field=10) + assert m.float_field == 10.0 + assert isinstance(m.float_field, float) + + m = Model.construct(float_field=10.12) + assert m.float_field == 10.12 + assert isinstance(m.float_field, float) + + # number too big + m = Model.construct(float_field=2**53 + 1) + assert m.float_field == 2**53 + 1 + assert isinstance(m.float_field, int) + + +def test_deprecated_alias() -> None: + class Model(BaseModel): + resource_id: str = Field(alias="model_id") + + @property + def model_id(self) -> str: + return self.resource_id + + m = Model.construct(model_id="id") + assert m.model_id == "id" + assert m.resource_id == "id" + assert m.resource_id is m.model_id + + m = parse_obj(Model, {"model_id": "id"}) + assert m.model_id == "id" + assert m.resource_id == "id" + assert m.resource_id is m.model_id + + +def test_omitted_fields() -> None: + class Model(BaseModel): + resource_id: Optional[str] = None + + m = Model.construct() + assert m.resource_id is None + assert "resource_id" not in m.model_fields_set + + m = Model.construct(resource_id=None) + assert m.resource_id is None + assert "resource_id" in m.model_fields_set + + m = Model.construct(resource_id="foo") + assert m.resource_id == "foo" + assert "resource_id" in m.model_fields_set + + +def test_to_dict() -> None: + class Model(BaseModel): + foo: Optional[str] = Field(alias="FOO", default=None) + + m = Model(FOO="hello") + assert m.to_dict() == {"FOO": "hello"} + assert m.to_dict(use_api_names=False) == {"foo": "hello"} + + m2 = Model() + assert m2.to_dict() == {} + assert m2.to_dict(exclude_unset=False) == {"FOO": None} + assert m2.to_dict(exclude_unset=False, exclude_none=True) == {} + assert m2.to_dict(exclude_unset=False, exclude_defaults=True) == {} + + m3 = 
Model(FOO=None) + assert m3.to_dict() == {"FOO": None} + assert m3.to_dict(exclude_none=True) == {} + assert m3.to_dict(exclude_defaults=True) == {} + + class Model2(BaseModel): + created_at: datetime + + time_str = "2024-03-21T11:39:01.275859" + m4 = Model2.construct(created_at=time_str) + assert m4.to_dict(mode="python") == {"created_at": datetime.fromisoformat(time_str)} + assert m4.to_dict(mode="json") == {"created_at": time_str} + + if PYDANTIC_V1: + with pytest.raises(ValueError, match="warnings is only supported in Pydantic v2"): + m.to_dict(warnings=False) + + +def test_forwards_compat_model_dump_method() -> None: + class Model(BaseModel): + foo: Optional[str] = Field(alias="FOO", default=None) + + m = Model(FOO="hello") + assert m.model_dump() == {"foo": "hello"} + assert m.model_dump(include={"bar"}) == {} + assert m.model_dump(exclude={"foo"}) == {} + assert m.model_dump(by_alias=True) == {"FOO": "hello"} + + m2 = Model() + assert m2.model_dump() == {"foo": None} + assert m2.model_dump(exclude_unset=True) == {} + assert m2.model_dump(exclude_none=True) == {} + assert m2.model_dump(exclude_defaults=True) == {} + + m3 = Model(FOO=None) + assert m3.model_dump() == {"foo": None} + assert m3.model_dump(exclude_none=True) == {} + + if PYDANTIC_V1: + with pytest.raises(ValueError, match="round_trip is only supported in Pydantic v2"): + m.model_dump(round_trip=True) + + with pytest.raises(ValueError, match="warnings is only supported in Pydantic v2"): + m.model_dump(warnings=False) + + +def test_compat_method_no_error_for_warnings() -> None: + class Model(BaseModel): + foo: Optional[str] + + m = Model(foo="hello") + assert isinstance(model_dump(m, warnings=False), dict) + + +def test_to_json() -> None: + class Model(BaseModel): + foo: Optional[str] = Field(alias="FOO", default=None) + + m = Model(FOO="hello") + assert json.loads(m.to_json()) == {"FOO": "hello"} + assert json.loads(m.to_json(use_api_names=False)) == {"foo": "hello"} + + if PYDANTIC_V1: + assert m.to_json(indent=None) == '{"FOO": "hello"}' + else: + assert m.to_json(indent=None) == '{"FOO":"hello"}' + + m2 = Model() + assert json.loads(m2.to_json()) == {} + assert json.loads(m2.to_json(exclude_unset=False)) == {"FOO": None} + assert json.loads(m2.to_json(exclude_unset=False, exclude_none=True)) == {} + assert json.loads(m2.to_json(exclude_unset=False, exclude_defaults=True)) == {} + + m3 = Model(FOO=None) + assert json.loads(m3.to_json()) == {"FOO": None} + assert json.loads(m3.to_json(exclude_none=True)) == {} + + if PYDANTIC_V1: + with pytest.raises(ValueError, match="warnings is only supported in Pydantic v2"): + m.to_json(warnings=False) + + +def test_forwards_compat_model_dump_json_method() -> None: + class Model(BaseModel): + foo: Optional[str] = Field(alias="FOO", default=None) + + m = Model(FOO="hello") + assert json.loads(m.model_dump_json()) == {"foo": "hello"} + assert json.loads(m.model_dump_json(include={"bar"})) == {} + assert json.loads(m.model_dump_json(include={"foo"})) == {"foo": "hello"} + assert json.loads(m.model_dump_json(by_alias=True)) == {"FOO": "hello"} + + assert m.model_dump_json(indent=2) == '{\n "foo": "hello"\n}' + + m2 = Model() + assert json.loads(m2.model_dump_json()) == {"foo": None} + assert json.loads(m2.model_dump_json(exclude_unset=True)) == {} + assert json.loads(m2.model_dump_json(exclude_none=True)) == {} + assert json.loads(m2.model_dump_json(exclude_defaults=True)) == {} + + m3 = Model(FOO=None) + assert json.loads(m3.model_dump_json()) == {"foo": None} + assert 
json.loads(m3.model_dump_json(exclude_none=True)) == {} + + if PYDANTIC_V1: + with pytest.raises(ValueError, match="round_trip is only supported in Pydantic v2"): + m.model_dump_json(round_trip=True) + + with pytest.raises(ValueError, match="warnings is only supported in Pydantic v2"): + m.model_dump_json(warnings=False) + + +def test_type_compat() -> None: + # our model type can be assigned to Pydantic's model type + + def takes_pydantic(model: pydantic.BaseModel) -> None: # noqa: ARG001 + ... + + class OurModel(BaseModel): + foo: Optional[str] = None + + takes_pydantic(OurModel()) + + +def test_annotated_types() -> None: + class Model(BaseModel): + value: str + + m = construct_type( + value={"value": "foo"}, + type_=cast(Any, Annotated[Model, "random metadata"]), + ) + assert isinstance(m, Model) + assert m.value == "foo" + + +def test_discriminated_unions_invalid_data() -> None: + class A(BaseModel): + type: Literal["a"] + + data: str + + class B(BaseModel): + type: Literal["b"] + + data: int + + m = construct_type( + value={"type": "b", "data": "foo"}, + type_=cast(Any, Annotated[Union[A, B], PropertyInfo(discriminator="type")]), + ) + assert isinstance(m, B) + assert m.type == "b" + assert m.data == "foo" # type: ignore[comparison-overlap] + + m = construct_type( + value={"type": "a", "data": 100}, + type_=cast(Any, Annotated[Union[A, B], PropertyInfo(discriminator="type")]), + ) + assert isinstance(m, A) + assert m.type == "a" + if PYDANTIC_V1: + # pydantic v1 automatically converts inputs to strings + # if the expected type is a str + assert m.data == "100" + else: + assert m.data == 100 # type: ignore[comparison-overlap] + + +def test_discriminated_unions_unknown_variant() -> None: + class A(BaseModel): + type: Literal["a"] + + data: str + + class B(BaseModel): + type: Literal["b"] + + data: int + + m = construct_type( + value={"type": "c", "data": None, "new_thing": "bar"}, + type_=cast(Any, Annotated[Union[A, B], PropertyInfo(discriminator="type")]), + ) + + # just chooses the first variant + assert isinstance(m, A) + assert m.type == "c" # type: ignore[comparison-overlap] + assert m.data == None # type: ignore[unreachable] + assert m.new_thing == "bar" + + +def test_discriminated_unions_invalid_data_nested_unions() -> None: + class A(BaseModel): + type: Literal["a"] + + data: str + + class B(BaseModel): + type: Literal["b"] + + data: int + + class C(BaseModel): + type: Literal["c"] + + data: bool + + m = construct_type( + value={"type": "b", "data": "foo"}, + type_=cast(Any, Annotated[Union[Union[A, B], C], PropertyInfo(discriminator="type")]), + ) + assert isinstance(m, B) + assert m.type == "b" + assert m.data == "foo" # type: ignore[comparison-overlap] + + m = construct_type( + value={"type": "c", "data": "foo"}, + type_=cast(Any, Annotated[Union[Union[A, B], C], PropertyInfo(discriminator="type")]), + ) + assert isinstance(m, C) + assert m.type == "c" + assert m.data == "foo" # type: ignore[comparison-overlap] + + +def test_discriminated_unions_with_aliases_invalid_data() -> None: + class A(BaseModel): + foo_type: Literal["a"] = Field(alias="type") + + data: str + + class B(BaseModel): + foo_type: Literal["b"] = Field(alias="type") + + data: int + + m = construct_type( + value={"type": "b", "data": "foo"}, + type_=cast(Any, Annotated[Union[A, B], PropertyInfo(discriminator="foo_type")]), + ) + assert isinstance(m, B) + assert m.foo_type == "b" + assert m.data == "foo" # type: ignore[comparison-overlap] + + m = construct_type( + value={"type": "a", "data": 100}, + 
type_=cast(Any, Annotated[Union[A, B], PropertyInfo(discriminator="foo_type")]), + ) + assert isinstance(m, A) + assert m.foo_type == "a" + if PYDANTIC_V1: + # pydantic v1 automatically converts inputs to strings + # if the expected type is a str + assert m.data == "100" + else: + assert m.data == 100 # type: ignore[comparison-overlap] + + +def test_discriminated_unions_overlapping_discriminators_invalid_data() -> None: + class A(BaseModel): + type: Literal["a"] + + data: bool + + class B(BaseModel): + type: Literal["a"] + + data: int + + m = construct_type( + value={"type": "a", "data": "foo"}, + type_=cast(Any, Annotated[Union[A, B], PropertyInfo(discriminator="type")]), + ) + assert isinstance(m, B) + assert m.type == "a" + assert m.data == "foo" # type: ignore[comparison-overlap] + + +def test_discriminated_unions_invalid_data_uses_cache() -> None: + class A(BaseModel): + type: Literal["a"] + + data: str + + class B(BaseModel): + type: Literal["b"] + + data: int + + UnionType = cast(Any, Union[A, B]) + + assert not hasattr(UnionType, "__discriminator__") + + m = construct_type( + value={"type": "b", "data": "foo"}, type_=cast(Any, Annotated[UnionType, PropertyInfo(discriminator="type")]) + ) + assert isinstance(m, B) + assert m.type == "b" + assert m.data == "foo" # type: ignore[comparison-overlap] + + discriminator = UnionType.__discriminator__ + assert discriminator is not None + + m = construct_type( + value={"type": "b", "data": "foo"}, type_=cast(Any, Annotated[UnionType, PropertyInfo(discriminator="type")]) + ) + assert isinstance(m, B) + assert m.type == "b" + assert m.data == "foo" # type: ignore[comparison-overlap] + + # if the discriminator details object stays the same between invocations then + # we hit the cache + assert UnionType.__discriminator__ is discriminator + + +@pytest.mark.skipif(PYDANTIC_V1, reason="TypeAliasType is not supported in Pydantic v1") +def test_type_alias_type() -> None: + Alias = TypeAliasType("Alias", str) # pyright: ignore + + class Model(BaseModel): + alias: Alias + union: Union[int, Alias] + + m = construct_type(value={"alias": "foo", "union": "bar"}, type_=Model) + assert isinstance(m, Model) + assert isinstance(m.alias, str) + assert m.alias == "foo" + assert isinstance(m.union, str) + assert m.union == "bar" + + +@pytest.mark.skipif(PYDANTIC_V1, reason="TypeAliasType is not supported in Pydantic v1") +def test_field_named_cls() -> None: + class Model(BaseModel): + cls: str + + m = construct_type(value={"cls": "foo"}, type_=Model) + assert isinstance(m, Model) + assert isinstance(m.cls, str) + + +def test_discriminated_union_case() -> None: + class A(BaseModel): + type: Literal["a"] + + data: bool + + class B(BaseModel): + type: Literal["b"] + + data: List[Union[A, object]] + + class ModelA(BaseModel): + type: Literal["modelA"] + + data: int + + class ModelB(BaseModel): + type: Literal["modelB"] + + required: str + + data: Union[A, B] + + # when constructing ModelA | ModelB, value data doesn't match ModelB exactly - missing `required` + m = construct_type( + value={"type": "modelB", "data": {"type": "a", "data": True}}, + type_=cast(Any, Annotated[Union[ModelA, ModelB], PropertyInfo(discriminator="type")]), + ) + + assert isinstance(m, ModelB) + + +def test_nested_discriminated_union() -> None: + class InnerType1(BaseModel): + type: Literal["type_1"] + + class InnerModel(BaseModel): + inner_value: str + + class InnerType2(BaseModel): + type: Literal["type_2"] + some_inner_model: InnerModel + + class Type1(BaseModel): + base_type: 
Literal["base_type_1"] + value: Annotated[ + Union[ + InnerType1, + InnerType2, + ], + PropertyInfo(discriminator="type"), + ] + + class Type2(BaseModel): + base_type: Literal["base_type_2"] + + T = Annotated[ + Union[ + Type1, + Type2, + ], + PropertyInfo(discriminator="base_type"), + ] + + model = construct_type( + type_=T, + value={ + "base_type": "base_type_1", + "value": { + "type": "type_2", + }, + }, + ) + assert isinstance(model, Type1) + assert isinstance(model.value, InnerType2) + + +@pytest.mark.skipif(PYDANTIC_V1, reason="this is only supported in pydantic v2 for now") +def test_extra_properties() -> None: + class Item(BaseModel): + prop: int + + class Model(BaseModel): + __pydantic_extra__: Dict[str, Item] = Field(init=False) # pyright: ignore[reportIncompatibleVariableOverride] + + other: str + + if TYPE_CHECKING: + + def __getattr__(self, attr: str) -> Item: ... + + model = construct_type( + type_=Model, + value={ + "a": {"prop": 1}, + "other": "foo", + }, + ) + assert isinstance(model, Model) + assert model.a.prop == 1 + assert isinstance(model.a, Item) + assert model.other == "foo" diff --git a/tests/test_qs.py b/tests/test_qs.py new file mode 100644 index 0000000..99accfc --- /dev/null +++ b/tests/test_qs.py @@ -0,0 +1,78 @@ +from typing import Any, cast +from functools import partial +from urllib.parse import unquote + +import pytest + +from brapi._qs import Querystring, stringify + + +def test_empty() -> None: + assert stringify({}) == "" + assert stringify({"a": {}}) == "" + assert stringify({"a": {"b": {"c": {}}}}) == "" + + +def test_basic() -> None: + assert stringify({"a": 1}) == "a=1" + assert stringify({"a": "b"}) == "a=b" + assert stringify({"a": True}) == "a=true" + assert stringify({"a": False}) == "a=false" + assert stringify({"a": 1.23456}) == "a=1.23456" + assert stringify({"a": None}) == "" + + +@pytest.mark.parametrize("method", ["class", "function"]) +def test_nested_dotted(method: str) -> None: + if method == "class": + serialise = Querystring(nested_format="dots").stringify + else: + serialise = partial(stringify, nested_format="dots") + + assert unquote(serialise({"a": {"b": "c"}})) == "a.b=c" + assert unquote(serialise({"a": {"b": "c", "d": "e", "f": "g"}})) == "a.b=c&a.d=e&a.f=g" + assert unquote(serialise({"a": {"b": {"c": {"d": "e"}}}})) == "a.b.c.d=e" + assert unquote(serialise({"a": {"b": True}})) == "a.b=true" + + +def test_nested_brackets() -> None: + assert unquote(stringify({"a": {"b": "c"}})) == "a[b]=c" + assert unquote(stringify({"a": {"b": "c", "d": "e", "f": "g"}})) == "a[b]=c&a[d]=e&a[f]=g" + assert unquote(stringify({"a": {"b": {"c": {"d": "e"}}}})) == "a[b][c][d]=e" + assert unquote(stringify({"a": {"b": True}})) == "a[b]=true" + + +@pytest.mark.parametrize("method", ["class", "function"]) +def test_array_comma(method: str) -> None: + if method == "class": + serialise = Querystring(array_format="comma").stringify + else: + serialise = partial(stringify, array_format="comma") + + assert unquote(serialise({"in": ["foo", "bar"]})) == "in=foo,bar" + assert unquote(serialise({"a": {"b": [True, False]}})) == "a[b]=true,false" + assert unquote(serialise({"a": {"b": [True, False, None, True]}})) == "a[b]=true,false,true" + + +def test_array_repeat() -> None: + assert unquote(stringify({"in": ["foo", "bar"]})) == "in=foo&in=bar" + assert unquote(stringify({"a": {"b": [True, False]}})) == "a[b]=true&a[b]=false" + assert unquote(stringify({"a": {"b": [True, False, None, True]}})) == "a[b]=true&a[b]=false&a[b]=true" + assert 
unquote(stringify({"in": ["foo", {"b": {"c": ["d", "e"]}}]})) == "in=foo&in[b][c]=d&in[b][c]=e" + + +@pytest.mark.parametrize("method", ["class", "function"]) +def test_array_brackets(method: str) -> None: + if method == "class": + serialise = Querystring(array_format="brackets").stringify + else: + serialise = partial(stringify, array_format="brackets") + + assert unquote(serialise({"in": ["foo", "bar"]})) == "in[]=foo&in[]=bar" + assert unquote(serialise({"a": {"b": [True, False]}})) == "a[b][]=true&a[b][]=false" + assert unquote(serialise({"a": {"b": [True, False, None, True]}})) == "a[b][]=true&a[b][]=false&a[b][]=true" + + +def test_unknown_array_format() -> None: + with pytest.raises(NotImplementedError, match="Unknown array_format value: foo, choose from comma, repeat"): + stringify({"a": ["foo", "bar"]}, array_format=cast(Any, "foo")) diff --git a/tests/test_required_args.py b/tests/test_required_args.py new file mode 100644 index 0000000..958fd4f --- /dev/null +++ b/tests/test_required_args.py @@ -0,0 +1,111 @@ +from __future__ import annotations + +import pytest + +from brapi._utils import required_args + + +def test_too_many_positional_params() -> None: + @required_args(["a"]) + def foo(a: str | None = None) -> str | None: + return a + + with pytest.raises(TypeError, match=r"foo\(\) takes 1 argument\(s\) but 2 were given"): + foo("a", "b") # type: ignore + + +def test_positional_param() -> None: + @required_args(["a"]) + def foo(a: str | None = None) -> str | None: + return a + + assert foo("a") == "a" + assert foo(None) is None + assert foo(a="b") == "b" + + with pytest.raises(TypeError, match="Missing required argument: 'a'"): + foo() + + +def test_keyword_only_param() -> None: + @required_args(["a"]) + def foo(*, a: str | None = None) -> str | None: + return a + + assert foo(a="a") == "a" + assert foo(a=None) is None + assert foo(a="b") == "b" + + with pytest.raises(TypeError, match="Missing required argument: 'a'"): + foo() + + +def test_multiple_params() -> None: + @required_args(["a", "b", "c"]) + def foo(a: str = "", *, b: str = "", c: str = "") -> str | None: + return f"{a} {b} {c}" + + assert foo(a="a", b="b", c="c") == "a b c" + + error_message = r"Missing required arguments.*" + + with pytest.raises(TypeError, match=error_message): + foo() + + with pytest.raises(TypeError, match=error_message): + foo(a="a") + + with pytest.raises(TypeError, match=error_message): + foo(b="b") + + with pytest.raises(TypeError, match=error_message): + foo(c="c") + + with pytest.raises(TypeError, match=r"Missing required argument: 'a'"): + foo(b="a", c="c") + + with pytest.raises(TypeError, match=r"Missing required argument: 'b'"): + foo("a", c="c") + + +def test_multiple_variants() -> None: + @required_args(["a"], ["b"]) + def foo(*, a: str | None = None, b: str | None = None) -> str | None: + return a if a is not None else b + + assert foo(a="foo") == "foo" + assert foo(b="bar") == "bar" + assert foo(a=None) is None + assert foo(b=None) is None + + # TODO: this error message could probably be improved + with pytest.raises( + TypeError, + match=r"Missing required arguments; Expected either \('a'\) or \('b'\) arguments to be given", + ): + foo() + + +def test_multiple_params_multiple_variants() -> None: + @required_args(["a", "b"], ["c"]) + def foo(*, a: str | None = None, b: str | None = None, c: str | None = None) -> str | None: + if a is not None: + return a + if b is not None: + return b + return c + + error_message = r"Missing required arguments; Expected either \('a' and 'b'\) or 
\('c'\) arguments to be given" + + with pytest.raises(TypeError, match=error_message): + foo(a="foo") + + with pytest.raises(TypeError, match=error_message): + foo(b="bar") + + with pytest.raises(TypeError, match=error_message): + foo() + + assert foo(a=None, b="bar") == "bar" + assert foo(c=None) is None + assert foo(c="foo") == "foo" diff --git a/tests/test_response.py b/tests/test_response.py new file mode 100644 index 0000000..5bdfd2e --- /dev/null +++ b/tests/test_response.py @@ -0,0 +1,277 @@ +import json +from typing import Any, List, Union, cast +from typing_extensions import Annotated + +import httpx +import pytest +import pydantic + +from brapi import Brapi, BaseModel, AsyncBrapi +from brapi._response import ( + APIResponse, + BaseAPIResponse, + AsyncAPIResponse, + BinaryAPIResponse, + AsyncBinaryAPIResponse, + extract_response_type, +) +from brapi._streaming import Stream +from brapi._base_client import FinalRequestOptions + + +class ConcreteBaseAPIResponse(APIResponse[bytes]): ... + + +class ConcreteAPIResponse(APIResponse[List[str]]): ... + + +class ConcreteAsyncAPIResponse(APIResponse[httpx.Response]): ... + + +def test_extract_response_type_direct_classes() -> None: + assert extract_response_type(BaseAPIResponse[str]) == str + assert extract_response_type(APIResponse[str]) == str + assert extract_response_type(AsyncAPIResponse[str]) == str + + +def test_extract_response_type_direct_class_missing_type_arg() -> None: + with pytest.raises( + RuntimeError, + match="Expected type to have a type argument at index 0 but it did not", + ): + extract_response_type(AsyncAPIResponse) + + +def test_extract_response_type_concrete_subclasses() -> None: + assert extract_response_type(ConcreteBaseAPIResponse) == bytes + assert extract_response_type(ConcreteAPIResponse) == List[str] + assert extract_response_type(ConcreteAsyncAPIResponse) == httpx.Response + + +def test_extract_response_type_binary_response() -> None: + assert extract_response_type(BinaryAPIResponse) == bytes + assert extract_response_type(AsyncBinaryAPIResponse) == bytes + + +class PydanticModel(pydantic.BaseModel): ... + + +def test_response_parse_mismatched_basemodel(client: Brapi) -> None: + response = APIResponse( + raw=httpx.Response(200, content=b"foo"), + client=client, + stream=False, + stream_cls=None, + cast_to=str, + options=FinalRequestOptions.construct(method="get", url="/foo"), + ) + + with pytest.raises( + TypeError, + match="Pydantic models must subclass our base model type, e.g. `from brapi import BaseModel`", + ): + response.parse(to=PydanticModel) + + +@pytest.mark.asyncio +async def test_async_response_parse_mismatched_basemodel(async_client: AsyncBrapi) -> None: + response = AsyncAPIResponse( + raw=httpx.Response(200, content=b"foo"), + client=async_client, + stream=False, + stream_cls=None, + cast_to=str, + options=FinalRequestOptions.construct(method="get", url="/foo"), + ) + + with pytest.raises( + TypeError, + match="Pydantic models must subclass our base model type, e.g. 
`from brapi import BaseModel`", + ): + await response.parse(to=PydanticModel) + + +def test_response_parse_custom_stream(client: Brapi) -> None: + response = APIResponse( + raw=httpx.Response(200, content=b"foo"), + client=client, + stream=True, + stream_cls=None, + cast_to=str, + options=FinalRequestOptions.construct(method="get", url="/foo"), + ) + + stream = response.parse(to=Stream[int]) + assert stream._cast_to == int + + +@pytest.mark.asyncio +async def test_async_response_parse_custom_stream(async_client: AsyncBrapi) -> None: + response = AsyncAPIResponse( + raw=httpx.Response(200, content=b"foo"), + client=async_client, + stream=True, + stream_cls=None, + cast_to=str, + options=FinalRequestOptions.construct(method="get", url="/foo"), + ) + + stream = await response.parse(to=Stream[int]) + assert stream._cast_to == int + + +class CustomModel(BaseModel): + foo: str + bar: int + + +def test_response_parse_custom_model(client: Brapi) -> None: + response = APIResponse( + raw=httpx.Response(200, content=json.dumps({"foo": "hello!", "bar": 2})), + client=client, + stream=False, + stream_cls=None, + cast_to=str, + options=FinalRequestOptions.construct(method="get", url="/foo"), + ) + + obj = response.parse(to=CustomModel) + assert obj.foo == "hello!" + assert obj.bar == 2 + + +@pytest.mark.asyncio +async def test_async_response_parse_custom_model(async_client: AsyncBrapi) -> None: + response = AsyncAPIResponse( + raw=httpx.Response(200, content=json.dumps({"foo": "hello!", "bar": 2})), + client=async_client, + stream=False, + stream_cls=None, + cast_to=str, + options=FinalRequestOptions.construct(method="get", url="/foo"), + ) + + obj = await response.parse(to=CustomModel) + assert obj.foo == "hello!" + assert obj.bar == 2 + + +def test_response_parse_annotated_type(client: Brapi) -> None: + response = APIResponse( + raw=httpx.Response(200, content=json.dumps({"foo": "hello!", "bar": 2})), + client=client, + stream=False, + stream_cls=None, + cast_to=str, + options=FinalRequestOptions.construct(method="get", url="/foo"), + ) + + obj = response.parse( + to=cast("type[CustomModel]", Annotated[CustomModel, "random metadata"]), + ) + assert obj.foo == "hello!" + assert obj.bar == 2 + + +async def test_async_response_parse_annotated_type(async_client: AsyncBrapi) -> None: + response = AsyncAPIResponse( + raw=httpx.Response(200, content=json.dumps({"foo": "hello!", "bar": 2})), + client=async_client, + stream=False, + stream_cls=None, + cast_to=str, + options=FinalRequestOptions.construct(method="get", url="/foo"), + ) + + obj = await response.parse( + to=cast("type[CustomModel]", Annotated[CustomModel, "random metadata"]), + ) + assert obj.foo == "hello!" 
+ assert obj.bar == 2 + + +@pytest.mark.parametrize( + "content, expected", + [ + ("false", False), + ("true", True), + ("False", False), + ("True", True), + ("TrUe", True), + ("FalSe", False), + ], +) +def test_response_parse_bool(client: Brapi, content: str, expected: bool) -> None: + response = APIResponse( + raw=httpx.Response(200, content=content), + client=client, + stream=False, + stream_cls=None, + cast_to=str, + options=FinalRequestOptions.construct(method="get", url="/foo"), + ) + + result = response.parse(to=bool) + assert result is expected + + +@pytest.mark.parametrize( + "content, expected", + [ + ("false", False), + ("true", True), + ("False", False), + ("True", True), + ("TrUe", True), + ("FalSe", False), + ], +) +async def test_async_response_parse_bool(client: AsyncBrapi, content: str, expected: bool) -> None: + response = AsyncAPIResponse( + raw=httpx.Response(200, content=content), + client=client, + stream=False, + stream_cls=None, + cast_to=str, + options=FinalRequestOptions.construct(method="get", url="/foo"), + ) + + result = await response.parse(to=bool) + assert result is expected + + +class OtherModel(BaseModel): + a: str + + +@pytest.mark.parametrize("client", [False], indirect=True) # loose validation +def test_response_parse_expect_model_union_non_json_content(client: Brapi) -> None: + response = APIResponse( + raw=httpx.Response(200, content=b"foo", headers={"Content-Type": "application/text"}), + client=client, + stream=False, + stream_cls=None, + cast_to=str, + options=FinalRequestOptions.construct(method="get", url="/foo"), + ) + + obj = response.parse(to=cast(Any, Union[CustomModel, OtherModel])) + assert isinstance(obj, str) + assert obj == "foo" + + +@pytest.mark.asyncio +@pytest.mark.parametrize("async_client", [False], indirect=True) # loose validation +async def test_async_response_parse_expect_model_union_non_json_content(async_client: AsyncBrapi) -> None: + response = AsyncAPIResponse( + raw=httpx.Response(200, content=b"foo", headers={"Content-Type": "application/text"}), + client=async_client, + stream=False, + stream_cls=None, + cast_to=str, + options=FinalRequestOptions.construct(method="get", url="/foo"), + ) + + obj = await response.parse(to=cast(Any, Union[CustomModel, OtherModel])) + assert isinstance(obj, str) + assert obj == "foo" diff --git a/tests/test_streaming.py b/tests/test_streaming.py new file mode 100644 index 0000000..ab4a852 --- /dev/null +++ b/tests/test_streaming.py @@ -0,0 +1,248 @@ +from __future__ import annotations + +from typing import Iterator, AsyncIterator + +import httpx +import pytest + +from brapi import Brapi, AsyncBrapi +from brapi._streaming import Stream, AsyncStream, ServerSentEvent + + +@pytest.mark.asyncio +@pytest.mark.parametrize("sync", [True, False], ids=["sync", "async"]) +async def test_basic(sync: bool, client: Brapi, async_client: AsyncBrapi) -> None: + def body() -> Iterator[bytes]: + yield b"event: completion\n" + yield b'data: {"foo":true}\n' + yield b"\n" + + iterator = make_event_iterator(content=body(), sync=sync, client=client, async_client=async_client) + + sse = await iter_next(iterator) + assert sse.event == "completion" + assert sse.json() == {"foo": True} + + await assert_empty_iter(iterator) + + +@pytest.mark.asyncio +@pytest.mark.parametrize("sync", [True, False], ids=["sync", "async"]) +async def test_data_missing_event(sync: bool, client: Brapi, async_client: AsyncBrapi) -> None: + def body() -> Iterator[bytes]: + yield b'data: {"foo":true}\n' + yield b"\n" + + iterator = 
make_event_iterator(content=body(), sync=sync, client=client, async_client=async_client) + + sse = await iter_next(iterator) + assert sse.event is None + assert sse.json() == {"foo": True} + + await assert_empty_iter(iterator) + + +@pytest.mark.asyncio +@pytest.mark.parametrize("sync", [True, False], ids=["sync", "async"]) +async def test_event_missing_data(sync: bool, client: Brapi, async_client: AsyncBrapi) -> None: + def body() -> Iterator[bytes]: + yield b"event: ping\n" + yield b"\n" + + iterator = make_event_iterator(content=body(), sync=sync, client=client, async_client=async_client) + + sse = await iter_next(iterator) + assert sse.event == "ping" + assert sse.data == "" + + await assert_empty_iter(iterator) + + +@pytest.mark.asyncio +@pytest.mark.parametrize("sync", [True, False], ids=["sync", "async"]) +async def test_multiple_events(sync: bool, client: Brapi, async_client: AsyncBrapi) -> None: + def body() -> Iterator[bytes]: + yield b"event: ping\n" + yield b"\n" + yield b"event: completion\n" + yield b"\n" + + iterator = make_event_iterator(content=body(), sync=sync, client=client, async_client=async_client) + + sse = await iter_next(iterator) + assert sse.event == "ping" + assert sse.data == "" + + sse = await iter_next(iterator) + assert sse.event == "completion" + assert sse.data == "" + + await assert_empty_iter(iterator) + + +@pytest.mark.asyncio +@pytest.mark.parametrize("sync", [True, False], ids=["sync", "async"]) +async def test_multiple_events_with_data(sync: bool, client: Brapi, async_client: AsyncBrapi) -> None: + def body() -> Iterator[bytes]: + yield b"event: ping\n" + yield b'data: {"foo":true}\n' + yield b"\n" + yield b"event: completion\n" + yield b'data: {"bar":false}\n' + yield b"\n" + + iterator = make_event_iterator(content=body(), sync=sync, client=client, async_client=async_client) + + sse = await iter_next(iterator) + assert sse.event == "ping" + assert sse.json() == {"foo": True} + + sse = await iter_next(iterator) + assert sse.event == "completion" + assert sse.json() == {"bar": False} + + await assert_empty_iter(iterator) + + +@pytest.mark.asyncio +@pytest.mark.parametrize("sync", [True, False], ids=["sync", "async"]) +async def test_multiple_data_lines_with_empty_line(sync: bool, client: Brapi, async_client: AsyncBrapi) -> None: + def body() -> Iterator[bytes]: + yield b"event: ping\n" + yield b"data: {\n" + yield b'data: "foo":\n' + yield b"data: \n" + yield b"data:\n" + yield b"data: true}\n" + yield b"\n\n" + + iterator = make_event_iterator(content=body(), sync=sync, client=client, async_client=async_client) + + sse = await iter_next(iterator) + assert sse.event == "ping" + assert sse.json() == {"foo": True} + assert sse.data == '{\n"foo":\n\n\ntrue}' + + await assert_empty_iter(iterator) + + +@pytest.mark.asyncio +@pytest.mark.parametrize("sync", [True, False], ids=["sync", "async"]) +async def test_data_json_escaped_double_new_line(sync: bool, client: Brapi, async_client: AsyncBrapi) -> None: + def body() -> Iterator[bytes]: + yield b"event: ping\n" + yield b'data: {"foo": "my long\\n\\ncontent"}' + yield b"\n\n" + + iterator = make_event_iterator(content=body(), sync=sync, client=client, async_client=async_client) + + sse = await iter_next(iterator) + assert sse.event == "ping" + assert sse.json() == {"foo": "my long\n\ncontent"} + + await assert_empty_iter(iterator) + + +@pytest.mark.asyncio +@pytest.mark.parametrize("sync", [True, False], ids=["sync", "async"]) +async def test_multiple_data_lines(sync: bool, client: Brapi, async_client: 
AsyncBrapi) -> None: + def body() -> Iterator[bytes]: + yield b"event: ping\n" + yield b"data: {\n" + yield b'data: "foo":\n' + yield b"data: true}\n" + yield b"\n\n" + + iterator = make_event_iterator(content=body(), sync=sync, client=client, async_client=async_client) + + sse = await iter_next(iterator) + assert sse.event == "ping" + assert sse.json() == {"foo": True} + + await assert_empty_iter(iterator) + + +@pytest.mark.parametrize("sync", [True, False], ids=["sync", "async"]) +async def test_special_new_line_character( + sync: bool, + client: Brapi, + async_client: AsyncBrapi, +) -> None: + def body() -> Iterator[bytes]: + yield b'data: {"content":" culpa"}\n' + yield b"\n" + yield b'data: {"content":" \xe2\x80\xa8"}\n' + yield b"\n" + yield b'data: {"content":"foo"}\n' + yield b"\n" + + iterator = make_event_iterator(content=body(), sync=sync, client=client, async_client=async_client) + + sse = await iter_next(iterator) + assert sse.event is None + assert sse.json() == {"content": " culpa"} + + sse = await iter_next(iterator) + assert sse.event is None + assert sse.json() == {"content": " 
"} + + sse = await iter_next(iterator) + assert sse.event is None + assert sse.json() == {"content": "foo"} + + await assert_empty_iter(iterator) + + +@pytest.mark.parametrize("sync", [True, False], ids=["sync", "async"]) +async def test_multi_byte_character_multiple_chunks( + sync: bool, + client: Brapi, + async_client: AsyncBrapi, +) -> None: + def body() -> Iterator[bytes]: + yield b'data: {"content":"' + # bytes taken from the string 'известни' and arbitrarily split + # so that some multi-byte characters span multiple chunks + yield b"\xd0" + yield b"\xb8\xd0\xb7\xd0" + yield b"\xb2\xd0\xb5\xd1\x81\xd1\x82\xd0\xbd\xd0\xb8" + yield b'"}\n' + yield b"\n" + + iterator = make_event_iterator(content=body(), sync=sync, client=client, async_client=async_client) + + sse = await iter_next(iterator) + assert sse.event is None + assert sse.json() == {"content": "известни"} + + +async def to_aiter(iter: Iterator[bytes]) -> AsyncIterator[bytes]: + for chunk in iter: + yield chunk + + +async def iter_next(iter: Iterator[ServerSentEvent] | AsyncIterator[ServerSentEvent]) -> ServerSentEvent: + if isinstance(iter, AsyncIterator): + return await iter.__anext__() + + return next(iter) + + +async def assert_empty_iter(iter: Iterator[ServerSentEvent] | AsyncIterator[ServerSentEvent]) -> None: + with pytest.raises((StopAsyncIteration, RuntimeError)): + await iter_next(iter) + + +def make_event_iterator( + content: Iterator[bytes], + *, + sync: bool, + client: Brapi, + async_client: AsyncBrapi, +) -> Iterator[ServerSentEvent] | AsyncIterator[ServerSentEvent]: + if sync: + return Stream(cast_to=object, client=client, response=httpx.Response(200, content=content))._iter_events() + + return AsyncStream( + cast_to=object, client=async_client, response=httpx.Response(200, content=to_aiter(content)) + )._iter_events() diff --git a/tests/test_transform.py b/tests/test_transform.py new file mode 100644 index 0000000..90f69e9 --- /dev/null +++ b/tests/test_transform.py @@ -0,0 +1,460 @@ +from __future__ import annotations + +import io +import pathlib +from typing import Any, Dict, List, Union, TypeVar, Iterable, Optional, cast +from datetime import date, datetime +from typing_extensions import Required, Annotated, TypedDict + +import pytest + +from brapi._types import Base64FileInput, omit, not_given +from brapi._utils import ( + PropertyInfo, + transform as _transform, + parse_datetime, + async_transform as _async_transform, +) +from brapi._compat import PYDANTIC_V1 +from brapi._models import BaseModel + +_T = TypeVar("_T") + +SAMPLE_FILE_PATH = pathlib.Path(__file__).parent.joinpath("sample_file.txt") + + +async def transform( + data: _T, + expected_type: object, + use_async: bool, +) -> _T: + if use_async: + return await _async_transform(data, expected_type=expected_type) + + return _transform(data, expected_type=expected_type) + + +parametrize = pytest.mark.parametrize("use_async", [False, True], ids=["sync", "async"]) + + +class Foo1(TypedDict): + foo_bar: Annotated[str, PropertyInfo(alias="fooBar")] + + +@parametrize +@pytest.mark.asyncio +async def test_top_level_alias(use_async: bool) -> None: + assert await transform({"foo_bar": "hello"}, expected_type=Foo1, use_async=use_async) == {"fooBar": "hello"} + + +class Foo2(TypedDict): + bar: Bar2 + + +class Bar2(TypedDict): + this_thing: Annotated[int, PropertyInfo(alias="this__thing")] + baz: Annotated[Baz2, PropertyInfo(alias="Baz")] + + +class Baz2(TypedDict): + my_baz: Annotated[str, PropertyInfo(alias="myBaz")] + + +@parametrize +@pytest.mark.asyncio +async 
def test_recursive_typeddict(use_async: bool) -> None: + assert await transform({"bar": {"this_thing": 1}}, Foo2, use_async) == {"bar": {"this__thing": 1}} + assert await transform({"bar": {"baz": {"my_baz": "foo"}}}, Foo2, use_async) == {"bar": {"Baz": {"myBaz": "foo"}}} + + +class Foo3(TypedDict): + things: List[Bar3] + + +class Bar3(TypedDict): + my_field: Annotated[str, PropertyInfo(alias="myField")] + + +@parametrize +@pytest.mark.asyncio +async def test_list_of_typeddict(use_async: bool) -> None: + result = await transform({"things": [{"my_field": "foo"}, {"my_field": "foo2"}]}, Foo3, use_async) + assert result == {"things": [{"myField": "foo"}, {"myField": "foo2"}]} + + +class Foo4(TypedDict): + foo: Union[Bar4, Baz4] + + +class Bar4(TypedDict): + foo_bar: Annotated[str, PropertyInfo(alias="fooBar")] + + +class Baz4(TypedDict): + foo_baz: Annotated[str, PropertyInfo(alias="fooBaz")] + + +@parametrize +@pytest.mark.asyncio +async def test_union_of_typeddict(use_async: bool) -> None: + assert await transform({"foo": {"foo_bar": "bar"}}, Foo4, use_async) == {"foo": {"fooBar": "bar"}} + assert await transform({"foo": {"foo_baz": "baz"}}, Foo4, use_async) == {"foo": {"fooBaz": "baz"}} + assert await transform({"foo": {"foo_baz": "baz", "foo_bar": "bar"}}, Foo4, use_async) == { + "foo": {"fooBaz": "baz", "fooBar": "bar"} + } + + +class Foo5(TypedDict): + foo: Annotated[Union[Bar4, List[Baz4]], PropertyInfo(alias="FOO")] + + +class Bar5(TypedDict): + foo_bar: Annotated[str, PropertyInfo(alias="fooBar")] + + +class Baz5(TypedDict): + foo_baz: Annotated[str, PropertyInfo(alias="fooBaz")] + + +@parametrize +@pytest.mark.asyncio +async def test_union_of_list(use_async: bool) -> None: + assert await transform({"foo": {"foo_bar": "bar"}}, Foo5, use_async) == {"FOO": {"fooBar": "bar"}} + assert await transform( + { + "foo": [ + {"foo_baz": "baz"}, + {"foo_baz": "baz"}, + ] + }, + Foo5, + use_async, + ) == {"FOO": [{"fooBaz": "baz"}, {"fooBaz": "baz"}]} + + +class Foo6(TypedDict): + bar: Annotated[str, PropertyInfo(alias="Bar")] + + +@parametrize +@pytest.mark.asyncio +async def test_includes_unknown_keys(use_async: bool) -> None: + assert await transform({"bar": "bar", "baz_": {"FOO": 1}}, Foo6, use_async) == { + "Bar": "bar", + "baz_": {"FOO": 1}, + } + + +class Foo7(TypedDict): + bar: Annotated[List[Bar7], PropertyInfo(alias="bAr")] + foo: Bar7 + + +class Bar7(TypedDict): + foo: str + + +@parametrize +@pytest.mark.asyncio +async def test_ignores_invalid_input(use_async: bool) -> None: + assert await transform({"bar": ""}, Foo7, use_async) == {"bAr": ""} + assert await transform({"foo": ""}, Foo7, use_async) == {"foo": ""} + + +class DatetimeDict(TypedDict, total=False): + foo: Annotated[datetime, PropertyInfo(format="iso8601")] + + bar: Annotated[Optional[datetime], PropertyInfo(format="iso8601")] + + required: Required[Annotated[Optional[datetime], PropertyInfo(format="iso8601")]] + + list_: Required[Annotated[Optional[List[datetime]], PropertyInfo(format="iso8601")]] + + union: Annotated[Union[int, datetime], PropertyInfo(format="iso8601")] + + +class DateDict(TypedDict, total=False): + foo: Annotated[date, PropertyInfo(format="iso8601")] + + +class DatetimeModel(BaseModel): + foo: datetime + + +class DateModel(BaseModel): + foo: Optional[date] + + +@parametrize +@pytest.mark.asyncio +async def test_iso8601_format(use_async: bool) -> None: + dt = datetime.fromisoformat("2023-02-23T14:16:36.337692+00:00") + tz = "+00:00" if PYDANTIC_V1 else "Z" + assert await transform({"foo": dt}, 
DatetimeDict, use_async) == {"foo": "2023-02-23T14:16:36.337692+00:00"} # type: ignore[comparison-overlap] + assert await transform(DatetimeModel(foo=dt), Any, use_async) == {"foo": "2023-02-23T14:16:36.337692" + tz} # type: ignore[comparison-overlap] + + dt = dt.replace(tzinfo=None) + assert await transform({"foo": dt}, DatetimeDict, use_async) == {"foo": "2023-02-23T14:16:36.337692"} # type: ignore[comparison-overlap] + assert await transform(DatetimeModel(foo=dt), Any, use_async) == {"foo": "2023-02-23T14:16:36.337692"} # type: ignore[comparison-overlap] + + assert await transform({"foo": None}, DateDict, use_async) == {"foo": None} # type: ignore[comparison-overlap] + assert await transform(DateModel(foo=None), Any, use_async) == {"foo": None} # type: ignore + assert await transform({"foo": date.fromisoformat("2023-02-23")}, DateDict, use_async) == {"foo": "2023-02-23"} # type: ignore[comparison-overlap] + assert await transform(DateModel(foo=date.fromisoformat("2023-02-23")), DateDict, use_async) == { + "foo": "2023-02-23" + } # type: ignore[comparison-overlap] + + +@parametrize +@pytest.mark.asyncio +async def test_optional_iso8601_format(use_async: bool) -> None: + dt = datetime.fromisoformat("2023-02-23T14:16:36.337692+00:00") + assert await transform({"bar": dt}, DatetimeDict, use_async) == {"bar": "2023-02-23T14:16:36.337692+00:00"} # type: ignore[comparison-overlap] + + assert await transform({"bar": None}, DatetimeDict, use_async) == {"bar": None} + + +@parametrize +@pytest.mark.asyncio +async def test_required_iso8601_format(use_async: bool) -> None: + dt = datetime.fromisoformat("2023-02-23T14:16:36.337692+00:00") + assert await transform({"required": dt}, DatetimeDict, use_async) == { + "required": "2023-02-23T14:16:36.337692+00:00" + } # type: ignore[comparison-overlap] + + assert await transform({"required": None}, DatetimeDict, use_async) == {"required": None} + + +@parametrize +@pytest.mark.asyncio +async def test_union_datetime(use_async: bool) -> None: + dt = datetime.fromisoformat("2023-02-23T14:16:36.337692+00:00") + assert await transform({"union": dt}, DatetimeDict, use_async) == { # type: ignore[comparison-overlap] + "union": "2023-02-23T14:16:36.337692+00:00" + } + + assert await transform({"union": "foo"}, DatetimeDict, use_async) == {"union": "foo"} + + +@parametrize +@pytest.mark.asyncio +async def test_nested_list_iso6801_format(use_async: bool) -> None: + dt1 = datetime.fromisoformat("2023-02-23T14:16:36.337692+00:00") + dt2 = parse_datetime("2022-01-15T06:34:23Z") + assert await transform({"list_": [dt1, dt2]}, DatetimeDict, use_async) == { # type: ignore[comparison-overlap] + "list_": ["2023-02-23T14:16:36.337692+00:00", "2022-01-15T06:34:23+00:00"] + } + + +@parametrize +@pytest.mark.asyncio +async def test_datetime_custom_format(use_async: bool) -> None: + dt = parse_datetime("2022-01-15T06:34:23Z") + + result = await transform(dt, Annotated[datetime, PropertyInfo(format="custom", format_template="%H")], use_async) + assert result == "06" # type: ignore[comparison-overlap] + + +class DateDictWithRequiredAlias(TypedDict, total=False): + required_prop: Required[Annotated[date, PropertyInfo(format="iso8601", alias="prop")]] + + +@parametrize +@pytest.mark.asyncio +async def test_datetime_with_alias(use_async: bool) -> None: + assert await transform({"required_prop": None}, DateDictWithRequiredAlias, use_async) == {"prop": None} # type: ignore[comparison-overlap] + assert await transform( + {"required_prop": date.fromisoformat("2023-02-23")}, 
DateDictWithRequiredAlias, use_async + ) == {"prop": "2023-02-23"} # type: ignore[comparison-overlap] + + +class MyModel(BaseModel): + foo: str + + +@parametrize +@pytest.mark.asyncio +async def test_pydantic_model_to_dictionary(use_async: bool) -> None: + assert cast(Any, await transform(MyModel(foo="hi!"), Any, use_async)) == {"foo": "hi!"} + assert cast(Any, await transform(MyModel.construct(foo="hi!"), Any, use_async)) == {"foo": "hi!"} + + +@parametrize +@pytest.mark.asyncio +async def test_pydantic_empty_model(use_async: bool) -> None: + assert cast(Any, await transform(MyModel.construct(), Any, use_async)) == {} + + +@parametrize +@pytest.mark.asyncio +async def test_pydantic_unknown_field(use_async: bool) -> None: + assert cast(Any, await transform(MyModel.construct(my_untyped_field=True), Any, use_async)) == { + "my_untyped_field": True + } + + +@parametrize +@pytest.mark.asyncio +async def test_pydantic_mismatched_types(use_async: bool) -> None: + model = MyModel.construct(foo=True) + if PYDANTIC_V1: + params = await transform(model, Any, use_async) + else: + with pytest.warns(UserWarning): + params = await transform(model, Any, use_async) + assert cast(Any, params) == {"foo": True} + + +@parametrize +@pytest.mark.asyncio +async def test_pydantic_mismatched_object_type(use_async: bool) -> None: + model = MyModel.construct(foo=MyModel.construct(hello="world")) + if PYDANTIC_V1: + params = await transform(model, Any, use_async) + else: + with pytest.warns(UserWarning): + params = await transform(model, Any, use_async) + assert cast(Any, params) == {"foo": {"hello": "world"}} + + +class ModelNestedObjects(BaseModel): + nested: MyModel + + +@parametrize +@pytest.mark.asyncio +async def test_pydantic_nested_objects(use_async: bool) -> None: + model = ModelNestedObjects.construct(nested={"foo": "stainless"}) + assert isinstance(model.nested, MyModel) + assert cast(Any, await transform(model, Any, use_async)) == {"nested": {"foo": "stainless"}} + + +class ModelWithDefaultField(BaseModel): + foo: str + with_none_default: Union[str, None] = None + with_str_default: str = "foo" + + +@parametrize +@pytest.mark.asyncio +async def test_pydantic_default_field(use_async: bool) -> None: + # should be excluded when defaults are used + model = ModelWithDefaultField.construct() + assert model.with_none_default is None + assert model.with_str_default == "foo" + assert cast(Any, await transform(model, Any, use_async)) == {} + + # should be included when the default value is explicitly given + model = ModelWithDefaultField.construct(with_none_default=None, with_str_default="foo") + assert model.with_none_default is None + assert model.with_str_default == "foo" + assert cast(Any, await transform(model, Any, use_async)) == {"with_none_default": None, "with_str_default": "foo"} + + # should be included when a non-default value is explicitly given + model = ModelWithDefaultField.construct(with_none_default="bar", with_str_default="baz") + assert model.with_none_default == "bar" + assert model.with_str_default == "baz" + assert cast(Any, await transform(model, Any, use_async)) == {"with_none_default": "bar", "with_str_default": "baz"} + + +class TypedDictIterableUnion(TypedDict): + foo: Annotated[Union[Bar8, Iterable[Baz8]], PropertyInfo(alias="FOO")] + + +class Bar8(TypedDict): + foo_bar: Annotated[str, PropertyInfo(alias="fooBar")] + + +class Baz8(TypedDict): + foo_baz: Annotated[str, PropertyInfo(alias="fooBaz")] + + +@parametrize +@pytest.mark.asyncio +async def test_iterable_of_dictionaries(use_async: 
bool) -> None: + assert await transform({"foo": [{"foo_baz": "bar"}]}, TypedDictIterableUnion, use_async) == { + "FOO": [{"fooBaz": "bar"}] + } + assert cast(Any, await transform({"foo": ({"foo_baz": "bar"},)}, TypedDictIterableUnion, use_async)) == { + "FOO": [{"fooBaz": "bar"}] + } + + def my_iter() -> Iterable[Baz8]: + yield {"foo_baz": "hello"} + yield {"foo_baz": "world"} + + assert await transform({"foo": my_iter()}, TypedDictIterableUnion, use_async) == { + "FOO": [{"fooBaz": "hello"}, {"fooBaz": "world"}] + } + + +@parametrize +@pytest.mark.asyncio +async def test_dictionary_items(use_async: bool) -> None: + class DictItems(TypedDict): + foo_baz: Annotated[str, PropertyInfo(alias="fooBaz")] + + assert await transform({"foo": {"foo_baz": "bar"}}, Dict[str, DictItems], use_async) == {"foo": {"fooBaz": "bar"}} + + +class TypedDictIterableUnionStr(TypedDict): + foo: Annotated[Union[str, Iterable[Baz8]], PropertyInfo(alias="FOO")] + + +@parametrize +@pytest.mark.asyncio +async def test_iterable_union_str(use_async: bool) -> None: + assert await transform({"foo": "bar"}, TypedDictIterableUnionStr, use_async) == {"FOO": "bar"} + assert cast(Any, await transform(iter([{"foo_baz": "bar"}]), Union[str, Iterable[Baz8]], use_async)) == [ + {"fooBaz": "bar"} + ] + + +class TypedDictBase64Input(TypedDict): + foo: Annotated[Union[str, Base64FileInput], PropertyInfo(format="base64")] + + +@parametrize +@pytest.mark.asyncio +async def test_base64_file_input(use_async: bool) -> None: + # strings are left as-is + assert await transform({"foo": "bar"}, TypedDictBase64Input, use_async) == {"foo": "bar"} + + # pathlib.Path is automatically converted to base64 + assert await transform({"foo": SAMPLE_FILE_PATH}, TypedDictBase64Input, use_async) == { + "foo": "SGVsbG8sIHdvcmxkIQo=" + } # type: ignore[comparison-overlap] + + # io instances are automatically converted to base64 + assert await transform({"foo": io.StringIO("Hello, world!")}, TypedDictBase64Input, use_async) == { + "foo": "SGVsbG8sIHdvcmxkIQ==" + } # type: ignore[comparison-overlap] + assert await transform({"foo": io.BytesIO(b"Hello, world!")}, TypedDictBase64Input, use_async) == { + "foo": "SGVsbG8sIHdvcmxkIQ==" + } # type: ignore[comparison-overlap] + + +@parametrize +@pytest.mark.asyncio +async def test_transform_skipping(use_async: bool) -> None: + # lists of ints are left as-is + data = [1, 2, 3] + assert await transform(data, List[int], use_async) is data + + # iterables of ints are converted to a list + data = iter([1, 2, 3]) + assert await transform(data, Iterable[int], use_async) == [1, 2, 3] + + +@parametrize +@pytest.mark.asyncio +async def test_strips_notgiven(use_async: bool) -> None: + assert await transform({"foo_bar": "bar"}, Foo1, use_async) == {"fooBar": "bar"} + assert await transform({"foo_bar": not_given}, Foo1, use_async) == {} + + +@parametrize +@pytest.mark.asyncio +async def test_strips_omit(use_async: bool) -> None: + assert await transform({"foo_bar": "bar"}, Foo1, use_async) == {"fooBar": "bar"} + assert await transform({"foo_bar": omit}, Foo1, use_async) == {} diff --git a/tests/test_utils/test_datetime_parse.py b/tests/test_utils/test_datetime_parse.py new file mode 100644 index 0000000..0017d67 --- /dev/null +++ b/tests/test_utils/test_datetime_parse.py @@ -0,0 +1,110 @@ +""" +Copied from https://github.com/pydantic/pydantic/blob/v1.10.22/tests/test_datetime_parse.py +with modifications so it works without pydantic v1 imports. 
+""" + +from typing import Type, Union +from datetime import date, datetime, timezone, timedelta + +import pytest + +from brapi._utils import parse_date, parse_datetime + + +def create_tz(minutes: int) -> timezone: + return timezone(timedelta(minutes=minutes)) + + +@pytest.mark.parametrize( + "value,result", + [ + # Valid inputs + ("1494012444.883309", date(2017, 5, 5)), + (b"1494012444.883309", date(2017, 5, 5)), + (1_494_012_444.883_309, date(2017, 5, 5)), + ("1494012444", date(2017, 5, 5)), + (1_494_012_444, date(2017, 5, 5)), + (0, date(1970, 1, 1)), + ("2012-04-23", date(2012, 4, 23)), + (b"2012-04-23", date(2012, 4, 23)), + ("2012-4-9", date(2012, 4, 9)), + (date(2012, 4, 9), date(2012, 4, 9)), + (datetime(2012, 4, 9, 12, 15), date(2012, 4, 9)), + # Invalid inputs + ("x20120423", ValueError), + ("2012-04-56", ValueError), + (19_999_999_999, date(2603, 10, 11)), # just before watershed + (20_000_000_001, date(1970, 8, 20)), # just after watershed + (1_549_316_052, date(2019, 2, 4)), # nowish in s + (1_549_316_052_104, date(2019, 2, 4)), # nowish in ms + (1_549_316_052_104_324, date(2019, 2, 4)), # nowish in μs + (1_549_316_052_104_324_096, date(2019, 2, 4)), # nowish in ns + ("infinity", date(9999, 12, 31)), + ("inf", date(9999, 12, 31)), + (float("inf"), date(9999, 12, 31)), + ("infinity ", date(9999, 12, 31)), + (int("1" + "0" * 100), date(9999, 12, 31)), + (1e1000, date(9999, 12, 31)), + ("-infinity", date(1, 1, 1)), + ("-inf", date(1, 1, 1)), + ("nan", ValueError), + ], +) +def test_date_parsing(value: Union[str, bytes, int, float], result: Union[date, Type[Exception]]) -> None: + if type(result) == type and issubclass(result, Exception): # pyright: ignore[reportUnnecessaryIsInstance] + with pytest.raises(result): + parse_date(value) + else: + assert parse_date(value) == result + + +@pytest.mark.parametrize( + "value,result", + [ + # Valid inputs + # values in seconds + ("1494012444.883309", datetime(2017, 5, 5, 19, 27, 24, 883_309, tzinfo=timezone.utc)), + (1_494_012_444.883_309, datetime(2017, 5, 5, 19, 27, 24, 883_309, tzinfo=timezone.utc)), + ("1494012444", datetime(2017, 5, 5, 19, 27, 24, tzinfo=timezone.utc)), + (b"1494012444", datetime(2017, 5, 5, 19, 27, 24, tzinfo=timezone.utc)), + (1_494_012_444, datetime(2017, 5, 5, 19, 27, 24, tzinfo=timezone.utc)), + # values in ms + ("1494012444000.883309", datetime(2017, 5, 5, 19, 27, 24, 883, tzinfo=timezone.utc)), + ("-1494012444000.883309", datetime(1922, 8, 29, 4, 32, 35, 999117, tzinfo=timezone.utc)), + (1_494_012_444_000, datetime(2017, 5, 5, 19, 27, 24, tzinfo=timezone.utc)), + ("2012-04-23T09:15:00", datetime(2012, 4, 23, 9, 15)), + ("2012-4-9 4:8:16", datetime(2012, 4, 9, 4, 8, 16)), + ("2012-04-23T09:15:00Z", datetime(2012, 4, 23, 9, 15, 0, 0, timezone.utc)), + ("2012-4-9 4:8:16-0320", datetime(2012, 4, 9, 4, 8, 16, 0, create_tz(-200))), + ("2012-04-23T10:20:30.400+02:30", datetime(2012, 4, 23, 10, 20, 30, 400_000, create_tz(150))), + ("2012-04-23T10:20:30.400+02", datetime(2012, 4, 23, 10, 20, 30, 400_000, create_tz(120))), + ("2012-04-23T10:20:30.400-02", datetime(2012, 4, 23, 10, 20, 30, 400_000, create_tz(-120))), + (b"2012-04-23T10:20:30.400-02", datetime(2012, 4, 23, 10, 20, 30, 400_000, create_tz(-120))), + (datetime(2017, 5, 5), datetime(2017, 5, 5)), + (0, datetime(1970, 1, 1, 0, 0, 0, tzinfo=timezone.utc)), + # Invalid inputs + ("x20120423091500", ValueError), + ("2012-04-56T09:15:90", ValueError), + ("2012-04-23T11:05:00-25:00", ValueError), + (19_999_999_999, datetime(2603, 10, 11, 11, 33, 19, 
tzinfo=timezone.utc)), # just before watershed + (20_000_000_001, datetime(1970, 8, 20, 11, 33, 20, 1000, tzinfo=timezone.utc)), # just after watershed + (1_549_316_052, datetime(2019, 2, 4, 21, 34, 12, 0, tzinfo=timezone.utc)), # nowish in s + (1_549_316_052_104, datetime(2019, 2, 4, 21, 34, 12, 104_000, tzinfo=timezone.utc)), # nowish in ms + (1_549_316_052_104_324, datetime(2019, 2, 4, 21, 34, 12, 104_324, tzinfo=timezone.utc)), # nowish in μs + (1_549_316_052_104_324_096, datetime(2019, 2, 4, 21, 34, 12, 104_324, tzinfo=timezone.utc)), # nowish in ns + ("infinity", datetime(9999, 12, 31, 23, 59, 59, 999999)), + ("inf", datetime(9999, 12, 31, 23, 59, 59, 999999)), + ("inf ", datetime(9999, 12, 31, 23, 59, 59, 999999)), + (1e50, datetime(9999, 12, 31, 23, 59, 59, 999999)), + (float("inf"), datetime(9999, 12, 31, 23, 59, 59, 999999)), + ("-infinity", datetime(1, 1, 1, 0, 0)), + ("-inf", datetime(1, 1, 1, 0, 0)), + ("nan", ValueError), + ], +) +def test_datetime_parsing(value: Union[str, bytes, int, float], result: Union[datetime, Type[Exception]]) -> None: + if type(result) == type and issubclass(result, Exception): # pyright: ignore[reportUnnecessaryIsInstance] + with pytest.raises(result): + parse_datetime(value) + else: + assert parse_datetime(value) == result diff --git a/tests/test_utils/test_proxy.py b/tests/test_utils/test_proxy.py new file mode 100644 index 0000000..eae01e5 --- /dev/null +++ b/tests/test_utils/test_proxy.py @@ -0,0 +1,34 @@ +import operator +from typing import Any +from typing_extensions import override + +from brapi._utils import LazyProxy + + +class RecursiveLazyProxy(LazyProxy[Any]): + @override + def __load__(self) -> Any: + return self + + def __call__(self, *_args: Any, **_kwds: Any) -> Any: + raise RuntimeError("This should never be called!") + + +def test_recursive_proxy() -> None: + proxy = RecursiveLazyProxy() + assert repr(proxy) == "RecursiveLazyProxy" + assert str(proxy) == "RecursiveLazyProxy" + assert dir(proxy) == [] + assert type(proxy).__name__ == "RecursiveLazyProxy" + assert type(operator.attrgetter("name.foo.bar.baz")(proxy)).__name__ == "RecursiveLazyProxy" + + +def test_isinstance_does_not_error() -> None: + class AlwaysErrorProxy(LazyProxy[Any]): + @override + def __load__(self) -> Any: + raise RuntimeError("Mocking missing dependency") + + proxy = AlwaysErrorProxy() + assert not isinstance(proxy, dict) + assert isinstance(proxy, LazyProxy) diff --git a/tests/test_utils/test_typing.py b/tests/test_utils/test_typing.py new file mode 100644 index 0000000..c2aca49 --- /dev/null +++ b/tests/test_utils/test_typing.py @@ -0,0 +1,73 @@ +from __future__ import annotations + +from typing import Generic, TypeVar, cast + +from brapi._utils import extract_type_var_from_base + +_T = TypeVar("_T") +_T2 = TypeVar("_T2") +_T3 = TypeVar("_T3") + + +class BaseGeneric(Generic[_T]): ... + + +class SubclassGeneric(BaseGeneric[_T]): ... + + +class BaseGenericMultipleTypeArgs(Generic[_T, _T2, _T3]): ... + + +class SubclassGenericMultipleTypeArgs(BaseGenericMultipleTypeArgs[_T, _T2, _T3]): ... + + +class SubclassDifferentOrderGenericMultipleTypeArgs(BaseGenericMultipleTypeArgs[_T2, _T, _T3]): ... 
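+ + +# The tests below exercise extract_type_var_from_base: given a parameterized generic and the +# generic_bases to search, it returns the type argument bound at the requested index; the cases +# cover plain generics, generic subclasses, multiple type parameters, and a subclass that passes +# the base's type parameters in a different order.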
+ + +def test_extract_type_var() -> None: + assert ( + extract_type_var_from_base( + BaseGeneric[int], + index=0, + generic_bases=cast("tuple[type, ...]", (BaseGeneric,)), + ) + == int + ) + + +def test_extract_type_var_generic_subclass() -> None: + assert ( + extract_type_var_from_base( + SubclassGeneric[int], + index=0, + generic_bases=cast("tuple[type, ...]", (BaseGeneric,)), + ) + == int + ) + + +def test_extract_type_var_multiple() -> None: + typ = BaseGenericMultipleTypeArgs[int, str, None] + + generic_bases = cast("tuple[type, ...]", (BaseGenericMultipleTypeArgs,)) + assert extract_type_var_from_base(typ, index=0, generic_bases=generic_bases) == int + assert extract_type_var_from_base(typ, index=1, generic_bases=generic_bases) == str + assert extract_type_var_from_base(typ, index=2, generic_bases=generic_bases) == type(None) + + +def test_extract_type_var_generic_subclass_multiple() -> None: + typ = SubclassGenericMultipleTypeArgs[int, str, None] + + generic_bases = cast("tuple[type, ...]", (BaseGenericMultipleTypeArgs,)) + assert extract_type_var_from_base(typ, index=0, generic_bases=generic_bases) == int + assert extract_type_var_from_base(typ, index=1, generic_bases=generic_bases) == str + assert extract_type_var_from_base(typ, index=2, generic_bases=generic_bases) == type(None) + + +def test_extract_type_var_generic_subclass_different_ordering_multiple() -> None: + typ = SubclassDifferentOrderGenericMultipleTypeArgs[int, str, None] + + generic_bases = cast("tuple[type, ...]", (BaseGenericMultipleTypeArgs,)) + assert extract_type_var_from_base(typ, index=0, generic_bases=generic_bases) == int + assert extract_type_var_from_base(typ, index=1, generic_bases=generic_bases) == str + assert extract_type_var_from_base(typ, index=2, generic_bases=generic_bases) == type(None) diff --git a/tests/uploads.test.ts b/tests/uploads.test.ts deleted file mode 100644 index 89decbb..0000000 --- a/tests/uploads.test.ts +++ /dev/null @@ -1,107 +0,0 @@ -import fs from 'fs'; -import type { ResponseLike } from 'brapi/internal/to-file'; -import { toFile } from 'brapi/core/uploads'; -import { File } from 'node:buffer'; - -class MyClass { - name: string = 'foo'; -} - -function mockResponse({ url, content }: { url: string; content?: Blob }): ResponseLike { - return { - url, - blob: async () => content || new Blob([]), - }; -} - -describe('toFile', () => { - it('throws a helpful error for mismatched types', async () => { - await expect( - // @ts-expect-error intentionally mismatched type - toFile({ foo: 'string' }), - ).rejects.toThrowErrorMatchingInlineSnapshot( - `"Unexpected data type: object; constructor: Object; props: ["foo"]"`, - ); - - await expect( - // @ts-expect-error intentionally mismatched type - toFile(new MyClass()), - ).rejects.toThrowErrorMatchingInlineSnapshot( - `"Unexpected data type: object; constructor: MyClass; props: ["name"]"`, - ); - }); - - it('disallows string at the type-level', async () => { - // @ts-expect-error we intentionally do not type support for `string` - // to help people avoid passing a file path - const file = await toFile('contents'); - expect(file.text()).resolves.toEqual('contents'); - }); - - it('extracts a file name from a Response', async () => { - const response = mockResponse({ url: 'https://example.com/my/audio.mp3' }); - const file = await toFile(response); - expect(file.name).toEqual('audio.mp3'); - }); - - it('extracts a file name from a File', async () => { - const input = new File(['foo'], 'input.jsonl'); - const file = await toFile(input); - 
expect(file.name).toEqual('input.jsonl'); - }); - - it('extracts a file name from a ReadStream', async () => { - const input = fs.createReadStream('tests/uploads.test.ts'); - const file = await toFile(input); - expect(file.name).toEqual('uploads.test.ts'); - }); - - it('does not copy File objects', async () => { - const input = new File(['foo'], 'input.jsonl', { type: 'jsonl' }); - const file = await toFile(input); - expect(file).toBe(input); - expect(file.name).toEqual('input.jsonl'); - expect(file.type).toBe('jsonl'); - }); - - it('is assignable to File and Blob', async () => { - const input = new File(['foo'], 'input.jsonl', { type: 'jsonl' }); - const result = await toFile(input); - const file: File = result; - const blob: Blob = result; - void file, blob; - }); -}); - -describe('missing File error message', () => { - let prevGlobalFile: unknown; - let prevNodeFile: unknown; - beforeEach(() => { - // The file shim captures the global File object when it's first imported. - // Reset modules before each test so we can test the error thrown when it's undefined. - jest.resetModules(); - const buffer = require('node:buffer'); - // @ts-ignore - prevGlobalFile = globalThis.File; - prevNodeFile = buffer.File; - // @ts-ignore - globalThis.File = undefined; - buffer.File = undefined; - }); - afterEach(() => { - // Clean up - // @ts-ignore - globalThis.File = prevGlobalFile; - require('node:buffer').File = prevNodeFile; - jest.resetModules(); - }); - - test('is thrown', async () => { - const uploads = await import('brapi/core/uploads'); - await expect( - uploads.toFile(mockResponse({ url: 'https://example.com/my/audio.mp3' })), - ).rejects.toMatchInlineSnapshot( - `[Error: \`File\` is not defined as a global, which is required for file uploads.]`, - ); - }); -}); diff --git a/tests/utils.py b/tests/utils.py new file mode 100644 index 0000000..008bc85 --- /dev/null +++ b/tests/utils.py @@ -0,0 +1,167 @@ +from __future__ import annotations + +import os +import inspect +import traceback +import contextlib +from typing import Any, TypeVar, Iterator, Sequence, cast +from datetime import date, datetime +from typing_extensions import Literal, get_args, get_origin, assert_type + +from brapi._types import Omit, NoneType +from brapi._utils import ( + is_dict, + is_list, + is_list_type, + is_union_type, + extract_type_arg, + is_sequence_type, + is_annotated_type, + is_type_alias_type, +) +from brapi._compat import PYDANTIC_V1, field_outer_type, get_model_fields +from brapi._models import BaseModel + +BaseModelT = TypeVar("BaseModelT", bound=BaseModel) + + +def assert_matches_model(model: type[BaseModelT], value: BaseModelT, *, path: list[str]) -> bool: + for name, field in get_model_fields(model).items(): + field_value = getattr(value, name) + if PYDANTIC_V1: + # in v1 nullability was structured differently + # https://docs.pydantic.dev/2.0/migration/#required-optional-and-nullable-fields + allow_none = getattr(field, "allow_none", False) + else: + allow_none = False + + assert_matches_type( + field_outer_type(field), + field_value, + path=[*path, name], + allow_none=allow_none, + ) + + return True + + +# Note: the `path` argument is only used to improve error messages when `--showlocals` is used +def assert_matches_type( + type_: Any, + value: object, + *, + path: list[str], + allow_none: bool = False, +) -> None: + if is_type_alias_type(type_): + type_ = type_.__value__ + + # unwrap `Annotated[T, ...]` -> `T` + if is_annotated_type(type_): + type_ = extract_type_arg(type_, 0) + + if allow_none and value 
is None: + return + + if type_ is None or type_ is NoneType: + assert value is None + return + + origin = get_origin(type_) or type_ + + if is_list_type(type_): + return _assert_list_type(type_, value) + + if is_sequence_type(type_): + assert isinstance(value, Sequence) + inner_type = get_args(type_)[0] + for entry in value: # type: ignore + assert_type(inner_type, entry) # type: ignore + return + + if origin == str: + assert isinstance(value, str) + elif origin == int: + assert isinstance(value, int) + elif origin == bool: + assert isinstance(value, bool) + elif origin == float: + assert isinstance(value, float) + elif origin == bytes: + assert isinstance(value, bytes) + elif origin == datetime: + assert isinstance(value, datetime) + elif origin == date: + assert isinstance(value, date) + elif origin == object: + # nothing to do here, the expected type is unknown + pass + elif origin == Literal: + assert value in get_args(type_) + elif origin == dict: + assert is_dict(value) + + args = get_args(type_) + key_type = args[0] + items_type = args[1] + + for key, item in value.items(): + assert_matches_type(key_type, key, path=[*path, ""]) + assert_matches_type(items_type, item, path=[*path, ""]) + elif is_union_type(type_): + variants = get_args(type_) + + try: + none_index = variants.index(type(None)) + except ValueError: + pass + else: + # special case Optional[T] for better error messages + if len(variants) == 2: + if value is None: + # valid + return + + return assert_matches_type(type_=variants[not none_index], value=value, path=path) + + for i, variant in enumerate(variants): + try: + assert_matches_type(variant, value, path=[*path, f"variant {i}"]) + return + except AssertionError: + traceback.print_exc() + continue + + raise AssertionError("Did not match any variants") + elif issubclass(origin, BaseModel): + assert isinstance(value, type_) + assert assert_matches_model(type_, cast(Any, value), path=path) + elif inspect.isclass(origin) and origin.__name__ == "HttpxBinaryResponseContent": + assert value.__class__.__name__ == "HttpxBinaryResponseContent" + else: + assert None, f"Unhandled field type: {type_}" + + +def _assert_list_type(type_: type[object], value: object) -> None: + assert is_list(value) + + inner_type = get_args(type_)[0] + for entry in value: + assert_type(inner_type, entry) # type: ignore + + +@contextlib.contextmanager +def update_env(**new_env: str | Omit) -> Iterator[None]: + old = os.environ.copy() + + try: + for name, value in new_env.items(): + if isinstance(value, Omit): + os.environ.pop(name, None) + else: + os.environ[name] = value + + yield None + finally: + os.environ.clear() + os.environ.update(old) diff --git a/tsc-multi.json b/tsc-multi.json deleted file mode 100644 index 384ddac..0000000 --- a/tsc-multi.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "targets": [ - { - "extname": ".js", - "module": "commonjs", - "shareHelpers": "internal/tslib.js" - }, - { - "extname": ".mjs", - "module": "esnext", - "shareHelpers": "internal/tslib.mjs" - } - ], - "projects": ["tsconfig.build.json"] -} diff --git a/tsconfig.build.json b/tsconfig.build.json deleted file mode 100644 index 30c103a..0000000 --- a/tsconfig.build.json +++ /dev/null @@ -1,18 +0,0 @@ -{ - "extends": "./tsconfig.json", - "include": ["dist/src"], - "exclude": [], - "compilerOptions": { - "rootDir": "./dist/src", - "paths": { - "brapi/*": ["./dist/src/*"], - "brapi": ["./dist/src/index.ts"] - }, - "noEmit": false, - "declaration": true, - "declarationMap": true, - "outDir": "dist", - "pretty": true, - 
"sourceMap": true - } -} diff --git a/tsconfig.deno.json b/tsconfig.deno.json deleted file mode 100644 index 849e070..0000000 --- a/tsconfig.deno.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "extends": "./tsconfig.json", - "include": ["dist-deno"], - "exclude": [], - "compilerOptions": { - "rootDir": "./dist-deno", - "lib": ["es2020", "DOM"], - "noEmit": true, - "declaration": true, - "declarationMap": true, - "outDir": "dist-deno", - "pretty": true, - "sourceMap": true - } -} diff --git a/tsconfig.dist-src.json b/tsconfig.dist-src.json deleted file mode 100644 index c550e29..0000000 --- a/tsconfig.dist-src.json +++ /dev/null @@ -1,11 +0,0 @@ -{ - // this config is included in the published src directory to prevent TS errors - // from appearing when users go to source, and VSCode opens the source .ts file - // via declaration maps - "include": ["index.ts"], - "compilerOptions": { - "target": "ES2015", - "lib": ["DOM", "DOM.Iterable", "ES2018"], - "moduleResolution": "node" - } -} diff --git a/tsconfig.json b/tsconfig.json deleted file mode 100644 index 6bc4d33..0000000 --- a/tsconfig.json +++ /dev/null @@ -1,37 +0,0 @@ -{ - "include": ["src", "tests", "examples"], - "exclude": [], - "compilerOptions": { - "target": "es2020", - "lib": ["es2020"], - "module": "commonjs", - "moduleResolution": "node", - "esModuleInterop": true, - "paths": { - "brapi/*": ["./src/*"], - "brapi": ["./src/index.ts"] - }, - "noEmit": true, - - "resolveJsonModule": true, - - "forceConsistentCasingInFileNames": true, - - "strict": true, - "noImplicitAny": true, - "strictNullChecks": true, - "strictFunctionTypes": true, - "strictBindCallApply": true, - "strictPropertyInitialization": true, - "noImplicitThis": true, - "noImplicitReturns": true, - "alwaysStrict": true, - "exactOptionalPropertyTypes": true, - "noUncheckedIndexedAccess": true, - "noImplicitOverride": true, - "noPropertyAccessFromIndexSignature": true, - "isolatedModules": false, - - "skipLibCheck": true - } -} diff --git a/yarn.lock b/yarn.lock deleted file mode 100644 index 8311caf..0000000 --- a/yarn.lock +++ /dev/null @@ -1,3500 +0,0 @@ -# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY. 
-# yarn lockfile v1 - - -"@aashutoshrathi/word-wrap@^1.2.3": - version "1.2.6" - resolved "https://registry.yarnpkg.com/@aashutoshrathi/word-wrap/-/word-wrap-1.2.6.tgz#bd9154aec9983f77b3a034ecaa015c2e4201f6cf" - integrity sha512-1Yjs2SvM8TflER/OD3cOjhWWOZb58A2t7wpE2S9XfBYTiIl+XFhQG2bjy4Pu1I+EAlCNUzRDYDdFwFYUKvXcIA== - -"@ampproject/remapping@^2.2.0": - version "2.2.1" - resolved "https://registry.yarnpkg.com/@ampproject/remapping/-/remapping-2.2.1.tgz#99e8e11851128b8702cd57c33684f1d0f260b630" - integrity sha512-lFMjJTrFL3j7L9yBxwYfCq2k6qqwHyzuUl/XBnif78PWTJYyL/dfowQHWE3sp6U6ZzqWiiIZnpTMO96zhkjwtg== - dependencies: - "@jridgewell/gen-mapping" "^0.3.0" - "@jridgewell/trace-mapping" "^0.3.9" - -"@andrewbranch/untar.js@^1.0.3": - version "1.0.3" - resolved "https://registry.yarnpkg.com/@andrewbranch/untar.js/-/untar.js-1.0.3.tgz#ba9494f85eb83017c5c855763969caf1d0adea00" - integrity sha512-Jh15/qVmrLGhkKJBdXlK1+9tY4lZruYjsgkDFj08ZmDiWVBLJcqkok7Z0/R0In+i1rScBpJlSvrTS2Lm41Pbnw== - -"@arethetypeswrong/cli@^0.17.0": - version "0.17.0" - resolved "https://registry.yarnpkg.com/@arethetypeswrong/cli/-/cli-0.17.0.tgz#f97f10926b3f9f9eb5117550242d2e06c25cadac" - integrity sha512-xSMW7bfzVWpYw5JFgZqBXqr6PdR0/REmn3DkxCES5N0JTcB0CVgbIynJCvKBFmXaPc3hzmmTrb7+yPDRoOSZdA== - dependencies: - "@arethetypeswrong/core" "0.17.0" - chalk "^4.1.2" - cli-table3 "^0.6.3" - commander "^10.0.1" - marked "^9.1.2" - marked-terminal "^7.1.0" - semver "^7.5.4" - -"@arethetypeswrong/core@0.17.0": - version "0.17.0" - resolved "https://registry.yarnpkg.com/@arethetypeswrong/core/-/core-0.17.0.tgz#abb3b5f425056d37193644c2a2de4aecf866b76b" - integrity sha512-FHyhFizXNetigTVsIhqXKGYLpazPS5YNojEPpZEUcBPt9wVvoEbNIvG+hybuBR+pjlRcbyuqhukHZm1fr+bDgA== - dependencies: - "@andrewbranch/untar.js" "^1.0.3" - cjs-module-lexer "^1.2.3" - fflate "^0.8.2" - lru-cache "^10.4.3" - semver "^7.5.4" - typescript "5.6.1-rc" - validate-npm-package-name "^5.0.0" - -"@babel/code-frame@^7.0.0", "@babel/code-frame@^7.12.13", "@babel/code-frame@^7.22.13", "@babel/code-frame@^7.23.5": - version "7.23.5" - resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.23.5.tgz#9009b69a8c602293476ad598ff53e4562e15c244" - integrity sha512-CgH3s1a96LipHCmSUmYFPwY7MNx8C3avkq7i4Wl3cfa662ldtUe4VM1TPXX70pfmrlWTb6jLqTYrZyT2ZTJBgA== - dependencies: - "@babel/highlight" "^7.23.4" - chalk "^2.4.2" - -"@babel/compat-data@^7.23.5": - version "7.23.5" - resolved "https://registry.yarnpkg.com/@babel/compat-data/-/compat-data-7.23.5.tgz#ffb878728bb6bdcb6f4510aa51b1be9afb8cfd98" - integrity sha512-uU27kfDRlhfKl+w1U6vp16IuvSLtjAxdArVXPa9BvLkrr7CYIsxH5adpHObeAGY/41+syctUWOZ140a2Rvkgjw== - -"@babel/core@^7.11.6", "@babel/core@^7.12.3": - version "7.23.6" - resolved "https://registry.yarnpkg.com/@babel/core/-/core-7.23.6.tgz#8be77cd77c55baadcc1eae1c33df90ab6d2151d4" - integrity sha512-FxpRyGjrMJXh7X3wGLGhNDCRiwpWEF74sKjTLDJSG5Kyvow3QZaG0Adbqzi9ZrVjTWpsX+2cxWXD71NMg93kdw== - dependencies: - "@ampproject/remapping" "^2.2.0" - "@babel/code-frame" "^7.23.5" - "@babel/generator" "^7.23.6" - "@babel/helper-compilation-targets" "^7.23.6" - "@babel/helper-module-transforms" "^7.23.3" - "@babel/helpers" "^7.23.6" - "@babel/parser" "^7.23.6" - "@babel/template" "^7.22.15" - "@babel/traverse" "^7.23.6" - "@babel/types" "^7.23.6" - convert-source-map "^2.0.0" - debug "^4.1.0" - gensync "^1.0.0-beta.2" - json5 "^2.2.3" - semver "^6.3.1" - -"@babel/generator@^7.23.6", "@babel/generator@^7.7.2": - version "7.23.6" - resolved 
"https://registry.yarnpkg.com/@babel/generator/-/generator-7.23.6.tgz#9e1fca4811c77a10580d17d26b57b036133f3c2e" - integrity sha512-qrSfCYxYQB5owCmGLbl8XRpX1ytXlpueOb0N0UmQwA073KZxejgQTzAmJezxvpwQD9uGtK2shHdi55QT+MbjIw== - dependencies: - "@babel/types" "^7.23.6" - "@jridgewell/gen-mapping" "^0.3.2" - "@jridgewell/trace-mapping" "^0.3.17" - jsesc "^2.5.1" - -"@babel/helper-compilation-targets@^7.23.6": - version "7.23.6" - resolved "https://registry.yarnpkg.com/@babel/helper-compilation-targets/-/helper-compilation-targets-7.23.6.tgz#4d79069b16cbcf1461289eccfbbd81501ae39991" - integrity sha512-9JB548GZoQVmzrFgp8o7KxdgkTGm6xs9DW0o/Pim72UDjzr5ObUQ6ZzYPqA+g9OTS2bBQoctLJrky0RDCAWRgQ== - dependencies: - "@babel/compat-data" "^7.23.5" - "@babel/helper-validator-option" "^7.23.5" - browserslist "^4.22.2" - lru-cache "^5.1.1" - semver "^6.3.1" - -"@babel/helper-environment-visitor@^7.22.20": - version "7.22.20" - resolved "https://registry.yarnpkg.com/@babel/helper-environment-visitor/-/helper-environment-visitor-7.22.20.tgz#96159db61d34a29dba454c959f5ae4a649ba9167" - integrity sha512-zfedSIzFhat/gFhWfHtgWvlec0nqB9YEIVrpuwjruLlXfUSnA8cJB0miHKwqDnQ7d32aKo2xt88/xZptwxbfhA== - -"@babel/helper-function-name@^7.23.0": - version "7.23.0" - resolved "https://registry.yarnpkg.com/@babel/helper-function-name/-/helper-function-name-7.23.0.tgz#1f9a3cdbd5b2698a670c30d2735f9af95ed52759" - integrity sha512-OErEqsrxjZTJciZ4Oo+eoZqeW9UIiOcuYKRJA4ZAgV9myA+pOXhhmpfNCKjEH/auVfEYVFJ6y1Tc4r0eIApqiw== - dependencies: - "@babel/template" "^7.22.15" - "@babel/types" "^7.23.0" - -"@babel/helper-hoist-variables@^7.22.5": - version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/helper-hoist-variables/-/helper-hoist-variables-7.22.5.tgz#c01a007dac05c085914e8fb652b339db50d823bb" - integrity sha512-wGjk9QZVzvknA6yKIUURb8zY3grXCcOZt+/7Wcy8O2uctxhplmUPkOdlgoNhmdVee2c92JXbf1xpMtVNbfoxRw== - dependencies: - "@babel/types" "^7.22.5" - -"@babel/helper-module-imports@^7.22.15": - version "7.22.15" - resolved "https://registry.yarnpkg.com/@babel/helper-module-imports/-/helper-module-imports-7.22.15.tgz#16146307acdc40cc00c3b2c647713076464bdbf0" - integrity sha512-0pYVBnDKZO2fnSPCrgM/6WMc7eS20Fbok+0r88fp+YtWVLZrp4CkafFGIp+W0VKw4a22sgebPT99y+FDNMdP4w== - dependencies: - "@babel/types" "^7.22.15" - -"@babel/helper-module-transforms@^7.23.3": - version "7.23.3" - resolved "https://registry.yarnpkg.com/@babel/helper-module-transforms/-/helper-module-transforms-7.23.3.tgz#d7d12c3c5d30af5b3c0fcab2a6d5217773e2d0f1" - integrity sha512-7bBs4ED9OmswdfDzpz4MpWgSrV7FXlc3zIagvLFjS5H+Mk7Snr21vQ6QwrsoCGMfNC4e4LQPdoULEt4ykz0SRQ== - dependencies: - "@babel/helper-environment-visitor" "^7.22.20" - "@babel/helper-module-imports" "^7.22.15" - "@babel/helper-simple-access" "^7.22.5" - "@babel/helper-split-export-declaration" "^7.22.6" - "@babel/helper-validator-identifier" "^7.22.20" - -"@babel/helper-plugin-utils@^7.0.0", "@babel/helper-plugin-utils@^7.10.4", "@babel/helper-plugin-utils@^7.12.13", "@babel/helper-plugin-utils@^7.14.5", "@babel/helper-plugin-utils@^7.22.5", "@babel/helper-plugin-utils@^7.8.0": - version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/helper-plugin-utils/-/helper-plugin-utils-7.22.5.tgz#dd7ee3735e8a313b9f7b05a773d892e88e6d7295" - integrity sha512-uLls06UVKgFG9QD4OeFYLEGteMIAa5kpTPcFL28yuCIIzsf6ZyKZMllKVOCZFhiZ5ptnwX4mtKdWCBE/uT4amg== - -"@babel/helper-simple-access@^7.22.5": - version "7.22.5" - resolved 
"https://registry.yarnpkg.com/@babel/helper-simple-access/-/helper-simple-access-7.22.5.tgz#4938357dc7d782b80ed6dbb03a0fba3d22b1d5de" - integrity sha512-n0H99E/K+Bika3++WNL17POvo4rKWZ7lZEp1Q+fStVbUi8nxPQEBOlTmCOxW/0JsS56SKKQ+ojAe2pHKJHN35w== - dependencies: - "@babel/types" "^7.22.5" - -"@babel/helper-split-export-declaration@^7.22.6": - version "7.22.6" - resolved "https://registry.yarnpkg.com/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.22.6.tgz#322c61b7310c0997fe4c323955667f18fcefb91c" - integrity sha512-AsUnxuLhRYsisFiaJwvp1QF+I3KjD5FOxut14q/GzovUe6orHLesW2C7d754kRm53h5gqrz6sFl6sxc4BVtE/g== - dependencies: - "@babel/types" "^7.22.5" - -"@babel/helper-string-parser@^7.23.4": - version "7.23.4" - resolved "https://registry.yarnpkg.com/@babel/helper-string-parser/-/helper-string-parser-7.23.4.tgz#9478c707febcbbe1ddb38a3d91a2e054ae622d83" - integrity sha512-803gmbQdqwdf4olxrX4AJyFBV/RTr3rSmOj0rKwesmzlfhYNDEs+/iOcznzpNWlJlIlTJC2QfPFcHB6DlzdVLQ== - -"@babel/helper-validator-identifier@^7.22.20": - version "7.22.20" - resolved "https://registry.yarnpkg.com/@babel/helper-validator-identifier/-/helper-validator-identifier-7.22.20.tgz#c4ae002c61d2879e724581d96665583dbc1dc0e0" - integrity sha512-Y4OZ+ytlatR8AI+8KZfKuL5urKp7qey08ha31L8b3BwewJAoJamTzyvxPR/5D+KkdJCGPq/+8TukHBlY10FX9A== - -"@babel/helper-validator-option@^7.23.5": - version "7.23.5" - resolved "https://registry.yarnpkg.com/@babel/helper-validator-option/-/helper-validator-option-7.23.5.tgz#907a3fbd4523426285365d1206c423c4c5520307" - integrity sha512-85ttAOMLsr53VgXkTbkx8oA6YTfT4q7/HzXSLEYmjcSTJPMPQtvq1BD79Byep5xMUYbGRzEpDsjUf3dyp54IKw== - -"@babel/helpers@^7.23.6": - version "7.23.6" - resolved "https://registry.yarnpkg.com/@babel/helpers/-/helpers-7.23.6.tgz#d03af2ee5fb34691eec0cda90f5ecbb4d4da145a" - integrity sha512-wCfsbN4nBidDRhpDhvcKlzHWCTlgJYUUdSJfzXb2NuBssDSIjc3xcb+znA7l+zYsFljAcGM0aFkN40cR3lXiGA== - dependencies: - "@babel/template" "^7.22.15" - "@babel/traverse" "^7.23.6" - "@babel/types" "^7.23.6" - -"@babel/highlight@^7.23.4": - version "7.23.4" - resolved "https://registry.yarnpkg.com/@babel/highlight/-/highlight-7.23.4.tgz#edaadf4d8232e1a961432db785091207ead0621b" - integrity sha512-acGdbYSfp2WheJoJm/EBBBLh/ID8KDc64ISZ9DYtBmC8/Q204PZJLHyzeB5qMzJ5trcOkybd78M4x2KWsUq++A== - dependencies: - "@babel/helper-validator-identifier" "^7.22.20" - chalk "^2.4.2" - js-tokens "^4.0.0" - -"@babel/parser@^7.1.0", "@babel/parser@^7.14.7", "@babel/parser@^7.20.7", "@babel/parser@^7.22.15", "@babel/parser@^7.23.6": - version "7.23.6" - resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.23.6.tgz#ba1c9e512bda72a47e285ae42aff9d2a635a9e3b" - integrity sha512-Z2uID7YJ7oNvAI20O9X0bblw7Qqs8Q2hFy0R9tAfnfLkp5MW0UH9eUvnDSnFwKZ0AvgS1ucqR4KzvVHgnke1VQ== - -"@babel/plugin-syntax-async-generators@^7.8.4": - version "7.8.4" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-async-generators/-/plugin-syntax-async-generators-7.8.4.tgz#a983fb1aeb2ec3f6ed042a210f640e90e786fe0d" - integrity sha512-tycmZxkGfZaxhMRbXlPXuVFpdWlXpir2W4AMhSJgRKzk/eDlIXOhb2LHWoLpDF7TEHylV5zNhykX6KAgHJmTNw== - dependencies: - "@babel/helper-plugin-utils" "^7.8.0" - -"@babel/plugin-syntax-bigint@^7.8.3": - version "7.8.3" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-bigint/-/plugin-syntax-bigint-7.8.3.tgz#4c9a6f669f5d0cdf1b90a1671e9a146be5300cea" - integrity sha512-wnTnFlG+YxQm3vDxpGE57Pj0srRU4sHE/mDkt1qv2YJJSeUAec2ma4WLUnUPeKjyrfntVwe/N6dCXpU+zL3Npg== - dependencies: - "@babel/helper-plugin-utils" "^7.8.0" - 
-"@babel/plugin-syntax-class-properties@^7.8.3": - version "7.12.13" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-class-properties/-/plugin-syntax-class-properties-7.12.13.tgz#b5c987274c4a3a82b89714796931a6b53544ae10" - integrity sha512-fm4idjKla0YahUNgFNLCB0qySdsoPiZP3iQE3rky0mBUtMZ23yDJ9SJdg6dXTSDnulOVqiF3Hgr9nbXvXTQZYA== - dependencies: - "@babel/helper-plugin-utils" "^7.12.13" - -"@babel/plugin-syntax-import-meta@^7.8.3": - version "7.10.4" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-import-meta/-/plugin-syntax-import-meta-7.10.4.tgz#ee601348c370fa334d2207be158777496521fd51" - integrity sha512-Yqfm+XDx0+Prh3VSeEQCPU81yC+JWZ2pDPFSS4ZdpfZhp4MkFMaDC1UqseovEKwSUpnIL7+vK+Clp7bfh0iD7g== - dependencies: - "@babel/helper-plugin-utils" "^7.10.4" - -"@babel/plugin-syntax-json-strings@^7.8.3": - version "7.8.3" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-json-strings/-/plugin-syntax-json-strings-7.8.3.tgz#01ca21b668cd8218c9e640cb6dd88c5412b2c96a" - integrity sha512-lY6kdGpWHvjoe2vk4WrAapEuBR69EMxZl+RoGRhrFGNYVK8mOPAW8VfbT/ZgrFbXlDNiiaxQnAtgVCZ6jv30EA== - dependencies: - "@babel/helper-plugin-utils" "^7.8.0" - -"@babel/plugin-syntax-jsx@^7.7.2": - version "7.23.3" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-jsx/-/plugin-syntax-jsx-7.23.3.tgz#8f2e4f8a9b5f9aa16067e142c1ac9cd9f810f473" - integrity sha512-EB2MELswq55OHUoRZLGg/zC7QWUKfNLpE57m/S2yr1uEneIgsTgrSzXP3NXEsMkVn76OlaVVnzN+ugObuYGwhg== - dependencies: - "@babel/helper-plugin-utils" "^7.22.5" - -"@babel/plugin-syntax-logical-assignment-operators@^7.8.3": - version "7.10.4" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-logical-assignment-operators/-/plugin-syntax-logical-assignment-operators-7.10.4.tgz#ca91ef46303530448b906652bac2e9fe9941f699" - integrity sha512-d8waShlpFDinQ5MtvGU9xDAOzKH47+FFoney2baFIoMr952hKOLp1HR7VszoZvOsV/4+RRszNY7D17ba0te0ig== - dependencies: - "@babel/helper-plugin-utils" "^7.10.4" - -"@babel/plugin-syntax-nullish-coalescing-operator@^7.8.3": - version "7.8.3" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-nullish-coalescing-operator/-/plugin-syntax-nullish-coalescing-operator-7.8.3.tgz#167ed70368886081f74b5c36c65a88c03b66d1a9" - integrity sha512-aSff4zPII1u2QD7y+F8oDsz19ew4IGEJg9SVW+bqwpwtfFleiQDMdzA/R+UlWDzfnHFCxxleFT0PMIrR36XLNQ== - dependencies: - "@babel/helper-plugin-utils" "^7.8.0" - -"@babel/plugin-syntax-numeric-separator@^7.8.3": - version "7.10.4" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-numeric-separator/-/plugin-syntax-numeric-separator-7.10.4.tgz#b9b070b3e33570cd9fd07ba7fa91c0dd37b9af97" - integrity sha512-9H6YdfkcK/uOnY/K7/aA2xpzaAgkQn37yzWUMRK7OaPOqOpGS1+n0H5hxT9AUw9EsSjPW8SVyMJwYRtWs3X3ug== - dependencies: - "@babel/helper-plugin-utils" "^7.10.4" - -"@babel/plugin-syntax-object-rest-spread@^7.8.3": - version "7.8.3" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-object-rest-spread/-/plugin-syntax-object-rest-spread-7.8.3.tgz#60e225edcbd98a640332a2e72dd3e66f1af55871" - integrity sha512-XoqMijGZb9y3y2XskN+P1wUGiVwWZ5JmoDRwx5+3GmEplNyVM2s2Dg8ILFQm8rWM48orGy5YpI5Bl8U1y7ydlA== - dependencies: - "@babel/helper-plugin-utils" "^7.8.0" - -"@babel/plugin-syntax-optional-catch-binding@^7.8.3": - version "7.8.3" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-optional-catch-binding/-/plugin-syntax-optional-catch-binding-7.8.3.tgz#6111a265bcfb020eb9efd0fdfd7d26402b9ed6c1" - integrity 
sha512-6VPD0Pc1lpTqw0aKoeRTMiB+kWhAoT24PA+ksWSBrFtl5SIRVpZlwN3NNPQjehA2E/91FV3RjLWoVTglWcSV3Q== - dependencies: - "@babel/helper-plugin-utils" "^7.8.0" - -"@babel/plugin-syntax-optional-chaining@^7.8.3": - version "7.8.3" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-optional-chaining/-/plugin-syntax-optional-chaining-7.8.3.tgz#4f69c2ab95167e0180cd5336613f8c5788f7d48a" - integrity sha512-KoK9ErH1MBlCPxV0VANkXW2/dw4vlbGDrFgz8bmUsBGYkFRcbRwMh6cIJubdPrkxRwuGdtCk0v/wPTKbQgBjkg== - dependencies: - "@babel/helper-plugin-utils" "^7.8.0" - -"@babel/plugin-syntax-top-level-await@^7.8.3": - version "7.14.5" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-top-level-await/-/plugin-syntax-top-level-await-7.14.5.tgz#c1cfdadc35a646240001f06138247b741c34d94c" - integrity sha512-hx++upLv5U1rgYfwe1xBQUhRmU41NEvpUvrp8jkrSCdvGSnM5/qdRMtylJ6PG5OFkBaHkbTAKTnd3/YyESRHFw== - dependencies: - "@babel/helper-plugin-utils" "^7.14.5" - -"@babel/plugin-syntax-typescript@^7.7.2": - version "7.23.3" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-typescript/-/plugin-syntax-typescript-7.23.3.tgz#24f460c85dbbc983cd2b9c4994178bcc01df958f" - integrity sha512-9EiNjVJOMwCO+43TqoTrgQ8jMwcAd0sWyXi9RPfIsLTj4R2MADDDQXELhffaUx/uJv2AYcxBgPwH6j4TIA4ytQ== - dependencies: - "@babel/helper-plugin-utils" "^7.22.5" - -"@babel/template@^7.22.15", "@babel/template@^7.3.3": - version "7.22.15" - resolved "https://registry.yarnpkg.com/@babel/template/-/template-7.22.15.tgz#09576efc3830f0430f4548ef971dde1350ef2f38" - integrity sha512-QPErUVm4uyJa60rkI73qneDacvdvzxshT3kksGqlGWYdOTIUOwJ7RDUL8sGqslY1uXWSL6xMFKEXDS3ox2uF0w== - dependencies: - "@babel/code-frame" "^7.22.13" - "@babel/parser" "^7.22.15" - "@babel/types" "^7.22.15" - -"@babel/traverse@^7.23.6": - version "7.23.6" - resolved "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.23.6.tgz#b53526a2367a0dd6edc423637f3d2d0f2521abc5" - integrity sha512-czastdK1e8YByZqezMPFiZ8ahwVMh/ESl9vPgvgdB9AmFMGP5jfpFax74AQgl5zj4XHzqeYAg2l8PuUeRS1MgQ== - dependencies: - "@babel/code-frame" "^7.23.5" - "@babel/generator" "^7.23.6" - "@babel/helper-environment-visitor" "^7.22.20" - "@babel/helper-function-name" "^7.23.0" - "@babel/helper-hoist-variables" "^7.22.5" - "@babel/helper-split-export-declaration" "^7.22.6" - "@babel/parser" "^7.23.6" - "@babel/types" "^7.23.6" - debug "^4.3.1" - globals "^11.1.0" - -"@babel/types@^7.0.0", "@babel/types@^7.20.7", "@babel/types@^7.22.15", "@babel/types@^7.22.5", "@babel/types@^7.23.0", "@babel/types@^7.23.6", "@babel/types@^7.3.3": - version "7.23.6" - resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.23.6.tgz#be33fdb151e1f5a56877d704492c240fc71c7ccd" - integrity sha512-+uarb83brBzPKN38NX1MkB6vb6+mwvR6amUulqAE7ccQw1pEl+bCia9TbdG1lsnFP7lZySvUn37CHyXQdfTwzg== - dependencies: - "@babel/helper-string-parser" "^7.23.4" - "@babel/helper-validator-identifier" "^7.22.20" - to-fast-properties "^2.0.0" - -"@bcoe/v8-coverage@^0.2.3": - version "0.2.3" - resolved "https://registry.yarnpkg.com/@bcoe/v8-coverage/-/v8-coverage-0.2.3.tgz#75a2e8b51cb758a7553d6804a5932d7aace75c39" - integrity sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw== - -"@colors/colors@1.5.0": - version "1.5.0" - resolved "https://registry.yarnpkg.com/@colors/colors/-/colors-1.5.0.tgz#bb504579c1cae923e6576a4f5da43d25f97bdbd9" - integrity sha512-ooWCrlZP11i8GImSjTHYHLkvFDP48nS4+204nGb1RiX/WXYHmJA2III9/e2DWVabCESdW7hBAEzHRqUn9OUVvQ== - -"@cspotcode/source-map-consumer@0.8.0": - version "0.8.0" - resolved 
"https://registry.yarnpkg.com/@cspotcode/source-map-consumer/-/source-map-consumer-0.8.0.tgz#33bf4b7b39c178821606f669bbc447a6a629786b" - integrity sha512-41qniHzTU8yAGbCp04ohlmSrZf8bkf/iJsl3V0dRGsQN/5GFfx+LbCSsCpp2gqrqjTVg/K6O8ycoV35JIwAzAg== - -"@cspotcode/source-map-support@0.7.0": - version "0.7.0" - resolved "https://registry.yarnpkg.com/@cspotcode/source-map-support/-/source-map-support-0.7.0.tgz#4789840aa859e46d2f3173727ab707c66bf344f5" - integrity sha512-X4xqRHqN8ACt2aHVe51OxeA2HjbcL4MqFqXkrmQszJ1NOUuUu5u6Vqx/0lZSVNku7velL5FC/s5uEAj1lsBMhA== - dependencies: - "@cspotcode/source-map-consumer" "0.8.0" - -"@eslint-community/eslint-utils@^4.2.0", "@eslint-community/eslint-utils@^4.4.0": - version "4.4.0" - resolved "https://registry.yarnpkg.com/@eslint-community/eslint-utils/-/eslint-utils-4.4.0.tgz#a23514e8fb9af1269d5f7788aa556798d61c6b59" - integrity sha512-1/sA4dwrzBAyeUoQ6oxahHKmrZvsnLCg4RfxW3ZFGGmQkSNQPFNLV9CUEFQP1x9EYXHTo5p6xdhZM1Ne9p/AfA== - dependencies: - eslint-visitor-keys "^3.3.0" - -"@eslint-community/regexpp@^4.10.0", "@eslint-community/regexpp@^4.12.1": - version "4.12.1" - resolved "https://registry.yarnpkg.com/@eslint-community/regexpp/-/regexpp-4.12.1.tgz#cfc6cffe39df390a3841cde2abccf92eaa7ae0e0" - integrity sha512-CCZCDJuduB9OUkFkY2IgppNZMi2lBQgD2qzwXkEia16cge2pijY/aXi96CJMquDMn3nJdlPV1A5KrJEXwfLNzQ== - -"@eslint/config-array@^0.19.0": - version "0.19.2" - resolved "https://registry.yarnpkg.com/@eslint/config-array/-/config-array-0.19.2.tgz#3060b809e111abfc97adb0bb1172778b90cb46aa" - integrity sha512-GNKqxfHG2ySmJOBSHg7LxeUx4xpuCoFjacmlCoYWEbaPXLwvfIjixRI12xCQZeULksQb23uiA8F40w5TojpV7w== - dependencies: - "@eslint/object-schema" "^2.1.6" - debug "^4.3.1" - minimatch "^3.1.2" - -"@eslint/core@^0.10.0": - version "0.10.0" - resolved "https://registry.yarnpkg.com/@eslint/core/-/core-0.10.0.tgz#23727063c21b335f752dbb3a16450f6f9cbc9091" - integrity sha512-gFHJ+xBOo4G3WRlR1e/3G8A6/KZAH6zcE/hkLRCZTi/B9avAG365QhFA8uOGzTMqgTghpn7/fSnscW++dpMSAw== - dependencies: - "@types/json-schema" "^7.0.15" - -"@eslint/core@^0.11.0": - version "0.11.0" - resolved "https://registry.yarnpkg.com/@eslint/core/-/core-0.11.0.tgz#7a9226e850922e42cbd2ba71361eacbe74352a12" - integrity sha512-DWUB2pksgNEb6Bz2fggIy1wh6fGgZP4Xyy/Mt0QZPiloKKXerbqq9D3SBQTlCRYOrcRPu4vuz+CGjwdfqxnoWA== - dependencies: - "@types/json-schema" "^7.0.15" - -"@eslint/eslintrc@^3.2.0": - version "3.2.0" - resolved "https://registry.yarnpkg.com/@eslint/eslintrc/-/eslintrc-3.2.0.tgz#57470ac4e2e283a6bf76044d63281196e370542c" - integrity sha512-grOjVNN8P3hjJn/eIETF1wwd12DdnwFDoyceUJLYYdkpbwq3nLi+4fqrTAONx7XDALqlL220wC/RHSC/QTI/0w== - dependencies: - ajv "^6.12.4" - debug "^4.3.2" - espree "^10.0.1" - globals "^14.0.0" - ignore "^5.2.0" - import-fresh "^3.2.1" - js-yaml "^4.1.0" - minimatch "^3.1.2" - strip-json-comments "^3.1.1" - -"@eslint/js@9.20.0": - version "9.20.0" - resolved "https://registry.yarnpkg.com/@eslint/js/-/js-9.20.0.tgz#7421bcbe74889fcd65d1be59f00130c289856eb4" - integrity sha512-iZA07H9io9Wn836aVTytRaNqh00Sad+EamwOVJT12GTLw1VGMFV/4JaME+JjLtr9fiGaoWgYnS54wrfWsSs4oQ== - -"@eslint/object-schema@^2.1.6": - version "2.1.6" - resolved "https://registry.yarnpkg.com/@eslint/object-schema/-/object-schema-2.1.6.tgz#58369ab5b5b3ca117880c0f6c0b0f32f6950f24f" - integrity sha512-RBMg5FRL0I0gs51M/guSAj5/e14VQ4tpZnQNWwuDT66P14I43ItmPfIZRhO9fUVIPOAQXU47atlywZ/czoqFPA== - -"@eslint/plugin-kit@^0.2.5": - version "0.2.5" - resolved 
"https://registry.yarnpkg.com/@eslint/plugin-kit/-/plugin-kit-0.2.5.tgz#ee07372035539e7847ef834e3f5e7b79f09e3a81" - integrity sha512-lB05FkqEdUg2AA0xEbUz0SnkXT1LcCTa438W4IWTUh4hdOnVbQyOJ81OrDXsJk/LSiJHubgGEFoR5EHq1NsH1A== - dependencies: - "@eslint/core" "^0.10.0" - levn "^0.4.1" - -"@humanfs/core@^0.19.1": - version "0.19.1" - resolved "https://registry.yarnpkg.com/@humanfs/core/-/core-0.19.1.tgz#17c55ca7d426733fe3c561906b8173c336b40a77" - integrity sha512-5DyQ4+1JEUzejeK1JGICcideyfUbGixgS9jNgex5nqkW+cY7WZhxBigmieN5Qnw9ZosSNVC9KQKyb+GUaGyKUA== - -"@humanfs/node@^0.16.6": - version "0.16.6" - resolved "https://registry.yarnpkg.com/@humanfs/node/-/node-0.16.6.tgz#ee2a10eaabd1131987bf0488fd9b820174cd765e" - integrity sha512-YuI2ZHQL78Q5HbhDiBA1X4LmYdXCKCMQIfw0pw7piHJwyREFebJUvrQN4cMssyES6x+vfUbx1CIpaQUKYdQZOw== - dependencies: - "@humanfs/core" "^0.19.1" - "@humanwhocodes/retry" "^0.3.0" - -"@humanwhocodes/module-importer@^1.0.1": - version "1.0.1" - resolved "https://registry.yarnpkg.com/@humanwhocodes/module-importer/-/module-importer-1.0.1.tgz#af5b2691a22b44be847b0ca81641c5fb6ad0172c" - integrity sha512-bxveV4V8v5Yb4ncFTT3rPSgZBOpCkjfK0y4oVVVJwIuDVBRMDXrPyXRL988i5ap9m9bnyEEjWfm5WkBmtffLfA== - -"@humanwhocodes/retry@^0.3.0": - version "0.3.1" - resolved "https://registry.yarnpkg.com/@humanwhocodes/retry/-/retry-0.3.1.tgz#c72a5c76a9fbaf3488e231b13dc52c0da7bab42a" - integrity sha512-JBxkERygn7Bv/GbN5Rv8Ul6LVknS+5Bp6RgDC/O8gEBU/yeH5Ui5C/OlWrTb6qct7LjjfT6Re2NxB0ln0yYybA== - -"@humanwhocodes/retry@^0.4.1": - version "0.4.1" - resolved "https://registry.yarnpkg.com/@humanwhocodes/retry/-/retry-0.4.1.tgz#9a96ce501bc62df46c4031fbd970e3cc6b10f07b" - integrity sha512-c7hNEllBlenFTHBky65mhq8WD2kbN9Q6gk0bTk8lSBvc554jpXSkST1iePudpt7+A/AQvuHs9EMqjHDXMY1lrA== - -"@istanbuljs/load-nyc-config@^1.0.0": - version "1.1.0" - resolved "https://registry.yarnpkg.com/@istanbuljs/load-nyc-config/-/load-nyc-config-1.1.0.tgz#fd3db1d59ecf7cf121e80650bb86712f9b55eced" - integrity sha512-VjeHSlIzpv/NyD3N0YuHfXOPDIixcA1q2ZV98wsMqcYlPmv2n3Yb2lYP9XMElnaFVXg5A7YLTeLu6V84uQDjmQ== - dependencies: - camelcase "^5.3.1" - find-up "^4.1.0" - get-package-type "^0.1.0" - js-yaml "^3.13.1" - resolve-from "^5.0.0" - -"@istanbuljs/schema@^0.1.2": - version "0.1.3" - resolved "https://registry.yarnpkg.com/@istanbuljs/schema/-/schema-0.1.3.tgz#e45e384e4b8ec16bce2fd903af78450f6bf7ec98" - integrity sha512-ZXRY4jNvVgSVQ8DL3LTcakaAtXwTVUxE81hslsyD2AtoXW/wVob10HkOJ1X/pAlcI7D+2YoZKg5do8G/w6RYgA== - -"@jest/console@^29.7.0": - version "29.7.0" - resolved "https://registry.yarnpkg.com/@jest/console/-/console-29.7.0.tgz#cd4822dbdb84529265c5a2bdb529a3c9cc950ffc" - integrity sha512-5Ni4CU7XHQi32IJ398EEP4RrB8eV09sXP2ROqD4bksHrnTree52PsxvX8tpL8LvTZ3pFzXyPbNQReSN41CAhOg== - dependencies: - "@jest/types" "^29.6.3" - "@types/node" "*" - chalk "^4.0.0" - jest-message-util "^29.7.0" - jest-util "^29.7.0" - slash "^3.0.0" - -"@jest/core@^29.7.0": - version "29.7.0" - resolved "https://registry.yarnpkg.com/@jest/core/-/core-29.7.0.tgz#b6cccc239f30ff36609658c5a5e2291757ce448f" - integrity sha512-n7aeXWKMnGtDA48y8TLWJPJmLmmZ642Ceo78cYWEpiD7FzDgmNDV/GCVRorPABdXLJZ/9wzzgZAlHjXjxDHGsg== - dependencies: - "@jest/console" "^29.7.0" - "@jest/reporters" "^29.7.0" - "@jest/test-result" "^29.7.0" - "@jest/transform" "^29.7.0" - "@jest/types" "^29.6.3" - "@types/node" "*" - ansi-escapes "^4.2.1" - chalk "^4.0.0" - ci-info "^3.2.0" - exit "^0.1.2" - graceful-fs "^4.2.9" - jest-changed-files "^29.7.0" - jest-config "^29.7.0" - jest-haste-map "^29.7.0" - 
jest-message-util "^29.7.0" - jest-regex-util "^29.6.3" - jest-resolve "^29.7.0" - jest-resolve-dependencies "^29.7.0" - jest-runner "^29.7.0" - jest-runtime "^29.7.0" - jest-snapshot "^29.7.0" - jest-util "^29.7.0" - jest-validate "^29.7.0" - jest-watcher "^29.7.0" - micromatch "^4.0.4" - pretty-format "^29.7.0" - slash "^3.0.0" - strip-ansi "^6.0.0" - -"@jest/create-cache-key-function@^29.7.0": - version "29.7.0" - resolved "https://registry.yarnpkg.com/@jest/create-cache-key-function/-/create-cache-key-function-29.7.0.tgz#793be38148fab78e65f40ae30c36785f4ad859f0" - integrity sha512-4QqS3LY5PBmTRHj9sAg1HLoPzqAI0uOX6wI/TRqHIcOxlFidy6YEmCQJk6FSZjNLGCeubDMfmkWL+qaLKhSGQA== - dependencies: - "@jest/types" "^29.6.3" - -"@jest/environment@^29.7.0": - version "29.7.0" - resolved "https://registry.yarnpkg.com/@jest/environment/-/environment-29.7.0.tgz#24d61f54ff1f786f3cd4073b4b94416383baf2a7" - integrity sha512-aQIfHDq33ExsN4jP1NWGXhxgQ/wixs60gDiKO+XVMd8Mn0NWPWgc34ZQDTb2jKaUWQ7MuwoitXAsN2XVXNMpAw== - dependencies: - "@jest/fake-timers" "^29.7.0" - "@jest/types" "^29.6.3" - "@types/node" "*" - jest-mock "^29.7.0" - -"@jest/expect-utils@^29.7.0": - version "29.7.0" - resolved "https://registry.yarnpkg.com/@jest/expect-utils/-/expect-utils-29.7.0.tgz#023efe5d26a8a70f21677d0a1afc0f0a44e3a1c6" - integrity sha512-GlsNBWiFQFCVi9QVSx7f5AgMeLxe9YCCs5PuP2O2LdjDAA8Jh9eX7lA1Jq/xdXw3Wb3hyvlFNfZIfcRetSzYcA== - dependencies: - jest-get-type "^29.6.3" - -"@jest/expect@^29.7.0": - version "29.7.0" - resolved "https://registry.yarnpkg.com/@jest/expect/-/expect-29.7.0.tgz#76a3edb0cb753b70dfbfe23283510d3d45432bf2" - integrity sha512-8uMeAMycttpva3P1lBHB8VciS9V0XAr3GymPpipdyQXbBcuhkLQOSe8E/p92RyAdToS6ZD1tFkX+CkhoECE0dQ== - dependencies: - expect "^29.7.0" - jest-snapshot "^29.7.0" - -"@jest/fake-timers@^29.7.0": - version "29.7.0" - resolved "https://registry.yarnpkg.com/@jest/fake-timers/-/fake-timers-29.7.0.tgz#fd91bf1fffb16d7d0d24a426ab1a47a49881a565" - integrity sha512-q4DH1Ha4TTFPdxLsqDXK1d3+ioSL7yL5oCMJZgDYm6i+6CygW5E5xVr/D1HdsGxjt1ZWSfUAs9OxSB/BNelWrQ== - dependencies: - "@jest/types" "^29.6.3" - "@sinonjs/fake-timers" "^10.0.2" - "@types/node" "*" - jest-message-util "^29.7.0" - jest-mock "^29.7.0" - jest-util "^29.7.0" - -"@jest/globals@^29.7.0": - version "29.7.0" - resolved "https://registry.yarnpkg.com/@jest/globals/-/globals-29.7.0.tgz#8d9290f9ec47ff772607fa864ca1d5a2efae1d4d" - integrity sha512-mpiz3dutLbkW2MNFubUGUEVLkTGiqW6yLVTA+JbP6fI6J5iL9Y0Nlg8k95pcF8ctKwCS7WVxteBs29hhfAotzQ== - dependencies: - "@jest/environment" "^29.7.0" - "@jest/expect" "^29.7.0" - "@jest/types" "^29.6.3" - jest-mock "^29.7.0" - -"@jest/reporters@^29.7.0": - version "29.7.0" - resolved "https://registry.yarnpkg.com/@jest/reporters/-/reporters-29.7.0.tgz#04b262ecb3b8faa83b0b3d321623972393e8f4c7" - integrity sha512-DApq0KJbJOEzAFYjHADNNxAE3KbhxQB1y5Kplb5Waqw6zVbuWatSnMjE5gs8FUgEPmNsnZA3NCWl9NG0ia04Pg== - dependencies: - "@bcoe/v8-coverage" "^0.2.3" - "@jest/console" "^29.7.0" - "@jest/test-result" "^29.7.0" - "@jest/transform" "^29.7.0" - "@jest/types" "^29.6.3" - "@jridgewell/trace-mapping" "^0.3.18" - "@types/node" "*" - chalk "^4.0.0" - collect-v8-coverage "^1.0.0" - exit "^0.1.2" - glob "^7.1.3" - graceful-fs "^4.2.9" - istanbul-lib-coverage "^3.0.0" - istanbul-lib-instrument "^6.0.0" - istanbul-lib-report "^3.0.0" - istanbul-lib-source-maps "^4.0.0" - istanbul-reports "^3.1.3" - jest-message-util "^29.7.0" - jest-util "^29.7.0" - jest-worker "^29.7.0" - slash "^3.0.0" - string-length "^4.0.1" - strip-ansi "^6.0.0" - 
v8-to-istanbul "^9.0.1" - -"@jest/schemas@^29.6.3": - version "29.6.3" - resolved "https://registry.yarnpkg.com/@jest/schemas/-/schemas-29.6.3.tgz#430b5ce8a4e0044a7e3819663305a7b3091c8e03" - integrity sha512-mo5j5X+jIZmJQveBKeS/clAueipV7KgiX1vMgCxam1RNYiqE1w62n0/tJJnHtjW8ZHcQco5gY85jA3mi0L+nSA== - dependencies: - "@sinclair/typebox" "^0.27.8" - -"@jest/source-map@^29.6.3": - version "29.6.3" - resolved "https://registry.yarnpkg.com/@jest/source-map/-/source-map-29.6.3.tgz#d90ba772095cf37a34a5eb9413f1b562a08554c4" - integrity sha512-MHjT95QuipcPrpLM+8JMSzFx6eHp5Bm+4XeFDJlwsvVBjmKNiIAvasGK2fxz2WbGRlnvqehFbh07MMa7n3YJnw== - dependencies: - "@jridgewell/trace-mapping" "^0.3.18" - callsites "^3.0.0" - graceful-fs "^4.2.9" - -"@jest/test-result@^29.7.0": - version "29.7.0" - resolved "https://registry.yarnpkg.com/@jest/test-result/-/test-result-29.7.0.tgz#8db9a80aa1a097bb2262572686734baed9b1657c" - integrity sha512-Fdx+tv6x1zlkJPcWXmMDAG2HBnaR9XPSd5aDWQVsfrZmLVT3lU1cwyxLgRmXR9yrq4NBoEm9BMsfgFzTQAbJYA== - dependencies: - "@jest/console" "^29.7.0" - "@jest/types" "^29.6.3" - "@types/istanbul-lib-coverage" "^2.0.0" - collect-v8-coverage "^1.0.0" - -"@jest/test-sequencer@^29.7.0": - version "29.7.0" - resolved "https://registry.yarnpkg.com/@jest/test-sequencer/-/test-sequencer-29.7.0.tgz#6cef977ce1d39834a3aea887a1726628a6f072ce" - integrity sha512-GQwJ5WZVrKnOJuiYiAF52UNUJXgTZx1NHjFSEB0qEMmSZKAkdMoIzw/Cj6x6NF4AvV23AUqDpFzQkN/eYCYTxw== - dependencies: - "@jest/test-result" "^29.7.0" - graceful-fs "^4.2.9" - jest-haste-map "^29.7.0" - slash "^3.0.0" - -"@jest/transform@^29.7.0": - version "29.7.0" - resolved "https://registry.yarnpkg.com/@jest/transform/-/transform-29.7.0.tgz#df2dd9c346c7d7768b8a06639994640c642e284c" - integrity sha512-ok/BTPFzFKVMwO5eOHRrvnBVHdRy9IrsrW1GpMaQ9MCnilNLXQKmAX8s1YXDFaai9xJpac2ySzV0YeRRECr2Vw== - dependencies: - "@babel/core" "^7.11.6" - "@jest/types" "^29.6.3" - "@jridgewell/trace-mapping" "^0.3.18" - babel-plugin-istanbul "^6.1.1" - chalk "^4.0.0" - convert-source-map "^2.0.0" - fast-json-stable-stringify "^2.1.0" - graceful-fs "^4.2.9" - jest-haste-map "^29.7.0" - jest-regex-util "^29.6.3" - jest-util "^29.7.0" - micromatch "^4.0.4" - pirates "^4.0.4" - slash "^3.0.0" - write-file-atomic "^4.0.2" - -"@jest/types@^29.6.3": - version "29.6.3" - resolved "https://registry.yarnpkg.com/@jest/types/-/types-29.6.3.tgz#1131f8cf634e7e84c5e77bab12f052af585fba59" - integrity sha512-u3UPsIilWKOM3F9CXtrG8LEJmNxwoCQC/XVj4IKYXvvpx7QIi/Kg1LI5uDmDpKlac62NUtX7eLjRh+jVZcLOzw== - dependencies: - "@jest/schemas" "^29.6.3" - "@types/istanbul-lib-coverage" "^2.0.0" - "@types/istanbul-reports" "^3.0.0" - "@types/node" "*" - "@types/yargs" "^17.0.8" - chalk "^4.0.0" - -"@jridgewell/gen-mapping@^0.3.0", "@jridgewell/gen-mapping@^0.3.2": - version "0.3.3" - resolved "https://registry.yarnpkg.com/@jridgewell/gen-mapping/-/gen-mapping-0.3.3.tgz#7e02e6eb5df901aaedb08514203b096614024098" - integrity sha512-HLhSWOLRi875zjjMG/r+Nv0oCW8umGb0BgEhyX3dDX3egwZtB8PqLnjz3yedt8R5StBrzcg4aBpnh8UA9D1BoQ== - dependencies: - "@jridgewell/set-array" "^1.0.1" - "@jridgewell/sourcemap-codec" "^1.4.10" - "@jridgewell/trace-mapping" "^0.3.9" - -"@jridgewell/resolve-uri@^3.1.0": - version "3.1.1" - resolved "https://registry.yarnpkg.com/@jridgewell/resolve-uri/-/resolve-uri-3.1.1.tgz#c08679063f279615a3326583ba3a90d1d82cc721" - integrity sha512-dSYZh7HhCDtCKm4QakX0xFpsRDqjjtZf/kjI/v3T3Nwt5r8/qz/M19F9ySyOqU94SXBmeG9ttTul+YnR4LOxFA== - -"@jridgewell/set-array@^1.0.1": - version "1.1.2" - resolved 
"https://registry.yarnpkg.com/@jridgewell/set-array/-/set-array-1.1.2.tgz#7c6cf998d6d20b914c0a55a91ae928ff25965e72" - integrity sha512-xnkseuNADM0gt2bs+BvhO0p78Mk762YnZdsuzFV018NoG1Sj1SCQvpSqa7XUaTam5vAGasABV9qXASMKnFMwMw== - -"@jridgewell/sourcemap-codec@^1.4.10", "@jridgewell/sourcemap-codec@^1.4.14": - version "1.4.15" - resolved "https://registry.yarnpkg.com/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.15.tgz#d7c6e6755c78567a951e04ab52ef0fd26de59f32" - integrity sha512-eF2rxCRulEKXHTRiDrDy6erMYWqNw4LPdQ8UQA4huuxaQsVeRPFl2oM8oDGxMFhJUWZf9McpLtJasDDZb/Bpeg== - -"@jridgewell/trace-mapping@^0.3.12", "@jridgewell/trace-mapping@^0.3.17", "@jridgewell/trace-mapping@^0.3.18", "@jridgewell/trace-mapping@^0.3.9": - version "0.3.20" - resolved "https://registry.yarnpkg.com/@jridgewell/trace-mapping/-/trace-mapping-0.3.20.tgz#72e45707cf240fa6b081d0366f8265b0cd10197f" - integrity sha512-R8LcPeWZol2zR8mmH3JeKQ6QRCFb7XgUhV9ZlGhHLGyg4wpPiPZNQOOWhFZhxKw8u//yTbNGI42Bx/3paXEQ+Q== - dependencies: - "@jridgewell/resolve-uri" "^3.1.0" - "@jridgewell/sourcemap-codec" "^1.4.14" - -"@nodelib/fs.scandir@2.1.5": - version "2.1.5" - resolved "https://registry.yarnpkg.com/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz#7619c2eb21b25483f6d167548b4cfd5a7488c3d5" - integrity sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g== - dependencies: - "@nodelib/fs.stat" "2.0.5" - run-parallel "^1.1.9" - -"@nodelib/fs.stat@2.0.5", "@nodelib/fs.stat@^2.0.2": - version "2.0.5" - resolved "https://registry.yarnpkg.com/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz#5bd262af94e9d25bd1e71b05deed44876a222e8b" - integrity sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A== - -"@nodelib/fs.walk@^1.2.3": - version "1.2.8" - resolved "https://registry.yarnpkg.com/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz#e95737e8bb6746ddedf69c556953494f196fe69a" - integrity sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg== - dependencies: - "@nodelib/fs.scandir" "2.1.5" - fastq "^1.6.0" - -"@pkgr/core@^0.2.4": - version "0.2.4" - resolved "https://registry.yarnpkg.com/@pkgr/core/-/core-0.2.4.tgz#d897170a2b0ba51f78a099edccd968f7b103387c" - integrity sha512-ROFF39F6ZrnzSUEmQQZUar0Jt4xVoP9WnDRdWwF4NNcXs3xBTLgBUDoOwW141y1jP+S8nahIbdxbFC7IShw9Iw== - -"@sinclair/typebox@^0.27.8": - version "0.27.8" - resolved "https://registry.yarnpkg.com/@sinclair/typebox/-/typebox-0.27.8.tgz#6667fac16c436b5434a387a34dedb013198f6e6e" - integrity sha512-+Fj43pSMwJs4KRrH/938Uf+uAELIgVBmQzg/q1YG10djyfA3TnrU8N8XzqCh/okZdszqBQTZf96idMfE5lnwTA== - -"@sindresorhus/is@^4.6.0": - version "4.6.0" - resolved "https://registry.yarnpkg.com/@sindresorhus/is/-/is-4.6.0.tgz#3c7c9c46e678feefe7a2e5bb609d3dbd665ffb3f" - integrity sha512-t09vSN3MdfsyCHoFcTRCH/iUtG7OJ0CsjzB8cjAmKc/va/kIgeDI/TxsigdncE/4be734m0cvIYwNaV4i2XqAw== - -"@sinonjs/commons@^3.0.0": - version "3.0.0" - resolved "https://registry.yarnpkg.com/@sinonjs/commons/-/commons-3.0.0.tgz#beb434fe875d965265e04722ccfc21df7f755d72" - integrity sha512-jXBtWAF4vmdNmZgD5FoKsVLv3rPgDnLgPbU84LIJ3otV44vJlDRokVng5v8NFJdCf/da9legHcKaRuZs4L7faA== - dependencies: - type-detect "4.0.8" - -"@sinonjs/fake-timers@^10.0.2": - version "10.3.0" - resolved "https://registry.yarnpkg.com/@sinonjs/fake-timers/-/fake-timers-10.3.0.tgz#55fdff1ecab9f354019129daf4df0dd4d923ea66" - integrity sha512-V4BG07kuYSUkTCSBHG8G8TNhM+F19jXFWnQtzj+we8DrkpSBCee9Z3Ms8yiGer/dlmhe35/Xdgyo3/0rQKg7YA== - dependencies: - "@sinonjs/commons" "^3.0.0" - 
-"@swc/core-darwin-arm64@1.4.16": - version "1.4.16" - resolved "https://registry.yarnpkg.com/@swc/core-darwin-arm64/-/core-darwin-arm64-1.4.16.tgz#2cd45d709ce76d448d96bf8d0006849541436611" - integrity sha512-UOCcH1GvjRnnM/LWT6VCGpIk0OhHRq6v1U6QXuPt5wVsgXnXQwnf5k3sG5Cm56hQHDvhRPY6HCsHi/p0oek8oQ== - -"@swc/core-darwin-x64@1.4.16": - version "1.4.16" - resolved "https://registry.yarnpkg.com/@swc/core-darwin-x64/-/core-darwin-x64-1.4.16.tgz#a5bc7d8b1dd850adb0bb95c6b5c742b92201fd01" - integrity sha512-t3bgqFoYLWvyVtVL6KkFNCINEoOrIlyggT/kJRgi1y0aXSr0oVgcrQ4ezJpdeahZZ4N+Q6vT3ffM30yIunELNA== - -"@swc/core-linux-arm-gnueabihf@1.4.16": - version "1.4.16" - resolved "https://registry.yarnpkg.com/@swc/core-linux-arm-gnueabihf/-/core-linux-arm-gnueabihf-1.4.16.tgz#961744908ee5cbb79bc009dcf58cc8b831111f38" - integrity sha512-DvHuwvEF86YvSd0lwnzVcjOTZ0jcxewIbsN0vc/0fqm9qBdMMjr9ox6VCam1n3yYeRtj4VFgrjeNFksqbUejdQ== - -"@swc/core-linux-arm64-gnu@1.4.16": - version "1.4.16" - resolved "https://registry.yarnpkg.com/@swc/core-linux-arm64-gnu/-/core-linux-arm64-gnu-1.4.16.tgz#43713be3f26757d82d2745dc25f8b63400e0a3d0" - integrity sha512-9Uu5YlPbyCvbidjKtYEsPpyZlu16roOZ5c2tP1vHfnU9bgf5Tz5q5VovSduNxPHx+ed2iC1b1URODHvDzbbDuQ== - -"@swc/core-linux-arm64-musl@1.4.16": - version "1.4.16" - resolved "https://registry.yarnpkg.com/@swc/core-linux-arm64-musl/-/core-linux-arm64-musl-1.4.16.tgz#394a7d030f3a61902bd3947bb9d70d26d42f3c81" - integrity sha512-/YZq/qB1CHpeoL0eMzyqK5/tYZn/rzKoCYDviFU4uduSUIJsDJQuQA/skdqUzqbheOXKAd4mnJ1hT04RbJ8FPQ== - -"@swc/core-linux-x64-gnu@1.4.16": - version "1.4.16" - resolved "https://registry.yarnpkg.com/@swc/core-linux-x64-gnu/-/core-linux-x64-gnu-1.4.16.tgz#71eb108b784f9d551ee8a35ebcdaed972f567981" - integrity sha512-UUjaW5VTngZYDcA8yQlrFmqs1tLi1TxbKlnaJwoNhel9zRQ0yG1YEVGrzTvv4YApSuIiDK18t+Ip927bwucuVQ== - -"@swc/core-linux-x64-musl@1.4.16": - version "1.4.16" - resolved "https://registry.yarnpkg.com/@swc/core-linux-x64-musl/-/core-linux-x64-musl-1.4.16.tgz#10dbaedb4e3dfc7268e3a9a66ad3431471ef035b" - integrity sha512-aFhxPifevDTwEDKPi4eRYWzC0p/WYJeiFkkpNU5Uc7a7M5iMWPAbPFUbHesdlb9Jfqs5c07oyz86u+/HySBNPQ== - -"@swc/core-win32-arm64-msvc@1.4.16": - version "1.4.16" - resolved "https://registry.yarnpkg.com/@swc/core-win32-arm64-msvc/-/core-win32-arm64-msvc-1.4.16.tgz#80247adff6c245ff32b44d773c1a148858cd655f" - integrity sha512-bTD43MbhIHL2s5QgCwyleaGwl96Gk/scF2TaVKdUe4QlJCDV/YK9h5oIBAp63ckHtE8GHlH4c8dZNBiAXn4Org== - -"@swc/core-win32-ia32-msvc@1.4.16": - version "1.4.16" - resolved "https://registry.yarnpkg.com/@swc/core-win32-ia32-msvc/-/core-win32-ia32-msvc-1.4.16.tgz#e540afc3ccf3224267b4ddfb408f9d9737984686" - integrity sha512-/lmZeAN/qV5XbK2SEvi8e2RkIg8FQNYiSA8y2/Zb4gTUMKVO5JMLH0BSWMiIKMstKDPDSxMWgwJaQHF8UMyPmQ== - -"@swc/core-win32-x64-msvc@1.4.16": - version "1.4.16" - resolved "https://registry.yarnpkg.com/@swc/core-win32-x64-msvc/-/core-win32-x64-msvc-1.4.16.tgz#f880939fca32c181adfe7e3abd2b6b7857bd3489" - integrity sha512-BPAfFfODWXtUu6SwaTTftDHvcbDyWBSI/oanUeRbQR5vVWkXoQ3cxLTsDluc3H74IqXS5z1Uyoe0vNo2hB1opA== - -"@swc/core@^1.3.102": - version "1.4.16" - resolved "https://registry.yarnpkg.com/@swc/core/-/core-1.4.16.tgz#d175bae2acfecd53bcbd4293f1fba5ec316634a0" - integrity sha512-Xaf+UBvW6JNuV131uvSNyMXHn+bh6LyKN4tbv7tOUFQpXyz/t9YWRE04emtlUW9Y0qrm/GKFCbY8n3z6BpZbTA== - dependencies: - "@swc/counter" "^0.1.2" - "@swc/types" "^0.1.5" - optionalDependencies: - "@swc/core-darwin-arm64" "1.4.16" - "@swc/core-darwin-x64" "1.4.16" - "@swc/core-linux-arm-gnueabihf" "1.4.16" - 
"@swc/core-linux-arm64-gnu" "1.4.16" - "@swc/core-linux-arm64-musl" "1.4.16" - "@swc/core-linux-x64-gnu" "1.4.16" - "@swc/core-linux-x64-musl" "1.4.16" - "@swc/core-win32-arm64-msvc" "1.4.16" - "@swc/core-win32-ia32-msvc" "1.4.16" - "@swc/core-win32-x64-msvc" "1.4.16" - -"@swc/counter@^0.1.2", "@swc/counter@^0.1.3": - version "0.1.3" - resolved "https://registry.yarnpkg.com/@swc/counter/-/counter-0.1.3.tgz#cc7463bd02949611c6329596fccd2b0ec782b0e9" - integrity sha512-e2BR4lsJkkRlKZ/qCHPw9ZaSxc0MVUd7gtbtaB7aMvHeJVYe8sOB8DBZkP2DtISHGSku9sCK6T6cnY0CtXrOCQ== - -"@swc/jest@^0.2.29": - version "0.2.36" - resolved "https://registry.yarnpkg.com/@swc/jest/-/jest-0.2.36.tgz#2797450a30d28b471997a17e901ccad946fe693e" - integrity sha512-8X80dp81ugxs4a11z1ka43FPhP+/e+mJNXJSxiNYk8gIX/jPBtY4gQTrKu/KIoco8bzKuPI5lUxjfLiGsfvnlw== - dependencies: - "@jest/create-cache-key-function" "^29.7.0" - "@swc/counter" "^0.1.3" - jsonc-parser "^3.2.0" - -"@swc/types@^0.1.5": - version "0.1.6" - resolved "https://registry.yarnpkg.com/@swc/types/-/types-0.1.6.tgz#2f13f748995b247d146de2784d3eb7195410faba" - integrity sha512-/JLo/l2JsT/LRd80C3HfbmVpxOAJ11FO2RCEslFrgzLltoP9j8XIbsyDcfCt2WWyX+CM96rBoNM+IToAkFOugg== - dependencies: - "@swc/counter" "^0.1.3" - -"@tsconfig/node10@^1.0.7": - version "1.0.8" - resolved "https://registry.yarnpkg.com/@tsconfig/node10/-/node10-1.0.8.tgz#c1e4e80d6f964fbecb3359c43bd48b40f7cadad9" - integrity sha512-6XFfSQmMgq0CFLY1MslA/CPUfhIL919M1rMsa5lP2P097N2Wd1sSX0tx1u4olM16fLNhtHZpRhedZJphNJqmZg== - -"@tsconfig/node12@^1.0.7": - version "1.0.9" - resolved "https://registry.yarnpkg.com/@tsconfig/node12/-/node12-1.0.9.tgz#62c1f6dee2ebd9aead80dc3afa56810e58e1a04c" - integrity sha512-/yBMcem+fbvhSREH+s14YJi18sp7J9jpuhYByADT2rypfajMZZN4WQ6zBGgBKp53NKmqI36wFYDb3yaMPurITw== - -"@tsconfig/node14@^1.0.0": - version "1.0.1" - resolved "https://registry.yarnpkg.com/@tsconfig/node14/-/node14-1.0.1.tgz#95f2d167ffb9b8d2068b0b235302fafd4df711f2" - integrity sha512-509r2+yARFfHHE7T6Puu2jjkoycftovhXRqW328PDXTVGKihlb1P8Z9mMZH04ebyajfRY7dedfGynlrFHJUQCg== - -"@tsconfig/node16@^1.0.2": - version "1.0.2" - resolved "https://registry.yarnpkg.com/@tsconfig/node16/-/node16-1.0.2.tgz#423c77877d0569db20e1fc80885ac4118314010e" - integrity sha512-eZxlbI8GZscaGS7kkc/trHTT5xgrjH3/1n2JDwusC9iahPKWMRvRjJSAN5mCXviuTGQ/lHnhvv8Q1YTpnfz9gA== - -"@types/babel__core@^7.1.14": - version "7.20.5" - resolved "https://registry.yarnpkg.com/@types/babel__core/-/babel__core-7.20.5.tgz#3df15f27ba85319caa07ba08d0721889bb39c017" - integrity sha512-qoQprZvz5wQFJwMDqeseRXWv3rqMvhgpbXFfVyWhbx9X47POIA6i/+dXefEmZKoAgOaTdaIgNSMqMIU61yRyzA== - dependencies: - "@babel/parser" "^7.20.7" - "@babel/types" "^7.20.7" - "@types/babel__generator" "*" - "@types/babel__template" "*" - "@types/babel__traverse" "*" - -"@types/babel__generator@*": - version "7.6.8" - resolved "https://registry.yarnpkg.com/@types/babel__generator/-/babel__generator-7.6.8.tgz#f836c61f48b1346e7d2b0d93c6dacc5b9535d3ab" - integrity sha512-ASsj+tpEDsEiFr1arWrlN6V3mdfjRMZt6LtK/Vp/kreFLnr5QH5+DhvD5nINYZXzwJvXeGq+05iUXcAzVrqWtw== - dependencies: - "@babel/types" "^7.0.0" - -"@types/babel__template@*": - version "7.4.4" - resolved "https://registry.yarnpkg.com/@types/babel__template/-/babel__template-7.4.4.tgz#5672513701c1b2199bc6dad636a9d7491586766f" - integrity sha512-h/NUaSyG5EyxBIp8YRxo4RMe2/qQgvyowRwVMzhYhBCONbW8PUsg4lkFMrhgZhUe5z3L3MiLDuvyJ/CaPa2A8A== - dependencies: - "@babel/parser" "^7.1.0" - "@babel/types" "^7.0.0" - -"@types/babel__traverse@*", "@types/babel__traverse@^7.0.6": - 
version "7.20.4" - resolved "https://registry.yarnpkg.com/@types/babel__traverse/-/babel__traverse-7.20.4.tgz#ec2c06fed6549df8bc0eb4615b683749a4a92e1b" - integrity sha512-mSM/iKUk5fDDrEV/e83qY+Cr3I1+Q3qqTuEn++HAWYjEa1+NxZr6CNrcJGf2ZTnq4HoFGC3zaTPZTobCzCFukA== - dependencies: - "@babel/types" "^7.20.7" - -"@types/estree@^1.0.6": - version "1.0.6" - resolved "https://registry.yarnpkg.com/@types/estree/-/estree-1.0.6.tgz#628effeeae2064a1b4e79f78e81d87b7e5fc7b50" - integrity sha512-AYnb1nQyY49te+VRAVgmzfcgjYS91mY5P0TKUDCLEM+gNnA+3T6rWITXRLYCpahpqSQbN5cE+gHpnPyXjHWxcw== - -"@types/graceful-fs@^4.1.3": - version "4.1.9" - resolved "https://registry.yarnpkg.com/@types/graceful-fs/-/graceful-fs-4.1.9.tgz#2a06bc0f68a20ab37b3e36aa238be6abdf49e8b4" - integrity sha512-olP3sd1qOEe5dXTSaFvQG+02VdRXcdytWLAZsAq1PecU8uqQAhkrnbli7DagjtXKW/Bl7YJbUsa8MPcuc8LHEQ== - dependencies: - "@types/node" "*" - -"@types/istanbul-lib-coverage@*", "@types/istanbul-lib-coverage@^2.0.0", "@types/istanbul-lib-coverage@^2.0.1": - version "2.0.6" - resolved "https://registry.yarnpkg.com/@types/istanbul-lib-coverage/-/istanbul-lib-coverage-2.0.6.tgz#7739c232a1fee9b4d3ce8985f314c0c6d33549d7" - integrity sha512-2QF/t/auWm0lsy8XtKVPG19v3sSOQlJe/YHZgfjb/KBBHOGSV+J2q/S671rcq9uTBrLAXmZpqJiaQbMT+zNU1w== - -"@types/istanbul-lib-report@*": - version "3.0.3" - resolved "https://registry.yarnpkg.com/@types/istanbul-lib-report/-/istanbul-lib-report-3.0.3.tgz#53047614ae72e19fc0401d872de3ae2b4ce350bf" - integrity sha512-NQn7AHQnk/RSLOxrBbGyJM/aVQ+pjj5HCgasFxc0K/KhoATfQ/47AyUl15I2yBUpihjmas+a+VJBOqecrFH+uA== - dependencies: - "@types/istanbul-lib-coverage" "*" - -"@types/istanbul-reports@^3.0.0": - version "3.0.4" - resolved "https://registry.yarnpkg.com/@types/istanbul-reports/-/istanbul-reports-3.0.4.tgz#0f03e3d2f670fbdac586e34b433783070cc16f54" - integrity sha512-pk2B1NWalF9toCRu6gjBzR69syFjP4Od8WRAX+0mmf9lAjCRicLOWc+ZrxZHx/0XRjotgkF9t6iaMJ+aXcOdZQ== - dependencies: - "@types/istanbul-lib-report" "*" - -"@types/jest@^29.4.0": - version "29.5.11" - resolved "https://registry.yarnpkg.com/@types/jest/-/jest-29.5.11.tgz#0c13aa0da7d0929f078ab080ae5d4ced80fa2f2c" - integrity sha512-S2mHmYIVe13vrm6q4kN6fLYYAka15ALQki/vgDC3mIukEOx8WJlv0kQPM+d4w8Gp6u0uSdKND04IlTXBv0rwnQ== - dependencies: - expect "^29.0.0" - pretty-format "^29.0.0" - -"@types/json-schema@^7.0.15": - version "7.0.15" - resolved "https://registry.yarnpkg.com/@types/json-schema/-/json-schema-7.0.15.tgz#596a1747233694d50f6ad8a7869fcb6f56cf5841" - integrity sha512-5+fP8P8MFNC+AyZCDxrB2pkZFPGzqQWUzpSeuuVLvm8VMcorNYavBqoFcxK8bQz4Qsbn4oUEEem4wDLfcysGHA== - -"@types/node@*": - version "20.10.5" - resolved "https://registry.yarnpkg.com/@types/node/-/node-20.10.5.tgz#47ad460b514096b7ed63a1dae26fad0914ed3ab2" - integrity sha512-nNPsNE65wjMxEKI93yOP+NPGGBJz/PoN3kZsVLee0XMiJolxSekEVD8wRwBUBqkwc7UWop0edW50yrCQW4CyRw== - dependencies: - undici-types "~5.26.4" - -"@types/node@^20.17.6": - version "20.19.11" - resolved "https://registry.yarnpkg.com/@types/node/-/node-20.19.11.tgz#728cab53092bd5f143beed7fbba7ba99de3c16c4" - integrity sha512-uug3FEEGv0r+jrecvUUpbY8lLisvIjg6AAic6a2bSP5OEOLeJsDSnvhCDov7ipFFMXS3orMpzlmi0ZcuGkBbow== - dependencies: - undici-types "~6.21.0" - -"@types/stack-utils@^2.0.0": - version "2.0.3" - resolved "https://registry.yarnpkg.com/@types/stack-utils/-/stack-utils-2.0.3.tgz#6209321eb2c1712a7e7466422b8cb1fc0d9dd5d8" - integrity sha512-9aEbYZ3TbYMznPdcdr3SmIrLXwC/AKZXQeCf9Pgao5CKb8CyHuEX5jzWPTkvregvhRJHcpRO6BFoGW9ycaOkYw== - -"@types/yargs-parser@*": - version "21.0.3" - 
resolved "https://registry.yarnpkg.com/@types/yargs-parser/-/yargs-parser-21.0.3.tgz#815e30b786d2e8f0dcd85fd5bcf5e1a04d008f15" - integrity sha512-I4q9QU9MQv4oEOz4tAHJtNz1cwuLxn2F3xcc2iV5WdqLPpUnj30aUuxt1mAxYTG+oe8CZMV/+6rU4S4gRDzqtQ== - -"@types/yargs@^17.0.8": - version "17.0.32" - resolved "https://registry.yarnpkg.com/@types/yargs/-/yargs-17.0.32.tgz#030774723a2f7faafebf645f4e5a48371dca6229" - integrity sha512-xQ67Yc/laOG5uMfX/093MRlGGCIBzZMarVa+gfNKJxWAIgykYpVGkBdbqEzGDDfCrVUj6Hiff4mTZ5BA6TmAog== - dependencies: - "@types/yargs-parser" "*" - -"@typescript-eslint/eslint-plugin@8.31.1": - version "8.31.1" - resolved "https://registry.yarnpkg.com/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.31.1.tgz#62f1befe59647524994e89de4516d8dcba7a850a" - integrity sha512-oUlH4h1ABavI4F0Xnl8/fOtML/eu8nI2A1nYd+f+55XI0BLu+RIqKoCiZKNo6DtqZBEQm5aNKA20G3Z5w3R6GQ== - dependencies: - "@eslint-community/regexpp" "^4.10.0" - "@typescript-eslint/scope-manager" "8.31.1" - "@typescript-eslint/type-utils" "8.31.1" - "@typescript-eslint/utils" "8.31.1" - "@typescript-eslint/visitor-keys" "8.31.1" - graphemer "^1.4.0" - ignore "^5.3.1" - natural-compare "^1.4.0" - ts-api-utils "^2.0.1" - -"@typescript-eslint/parser@8.31.1": - version "8.31.1" - resolved "https://registry.yarnpkg.com/@typescript-eslint/parser/-/parser-8.31.1.tgz#e9b0ccf30d37dde724ee4d15f4dbc195995cce1b" - integrity sha512-oU/OtYVydhXnumd0BobL9rkJg7wFJ9bFFPmSmB/bf/XWN85hlViji59ko6bSKBXyseT9V8l+CN1nwmlbiN0G7Q== - dependencies: - "@typescript-eslint/scope-manager" "8.31.1" - "@typescript-eslint/types" "8.31.1" - "@typescript-eslint/typescript-estree" "8.31.1" - "@typescript-eslint/visitor-keys" "8.31.1" - debug "^4.3.4" - -"@typescript-eslint/scope-manager@8.31.1": - version "8.31.1" - resolved "https://registry.yarnpkg.com/@typescript-eslint/scope-manager/-/scope-manager-8.31.1.tgz#1eb52e76878f545e4add142e0d8e3e97e7aa443b" - integrity sha512-BMNLOElPxrtNQMIsFHE+3P0Yf1z0dJqV9zLdDxN/xLlWMlXK/ApEsVEKzpizg9oal8bAT5Sc7+ocal7AC1HCVw== - dependencies: - "@typescript-eslint/types" "8.31.1" - "@typescript-eslint/visitor-keys" "8.31.1" - -"@typescript-eslint/type-utils@8.31.1": - version "8.31.1" - resolved "https://registry.yarnpkg.com/@typescript-eslint/type-utils/-/type-utils-8.31.1.tgz#be0f438fb24b03568e282a0aed85f776409f970c" - integrity sha512-fNaT/m9n0+dpSp8G/iOQ05GoHYXbxw81x+yvr7TArTuZuCA6VVKbqWYVZrV5dVagpDTtj/O8k5HBEE/p/HM5LA== - dependencies: - "@typescript-eslint/typescript-estree" "8.31.1" - "@typescript-eslint/utils" "8.31.1" - debug "^4.3.4" - ts-api-utils "^2.0.1" - -"@typescript-eslint/types@8.31.1": - version "8.31.1" - resolved "https://registry.yarnpkg.com/@typescript-eslint/types/-/types-8.31.1.tgz#478ed6f7e8aee1be7b63a60212b6bffe1423b5d4" - integrity sha512-SfepaEFUDQYRoA70DD9GtytljBePSj17qPxFHA/h3eg6lPTqGJ5mWOtbXCk1YrVU1cTJRd14nhaXWFu0l2troQ== - -"@typescript-eslint/typescript-estree@8.31.1": - version "8.31.1" - resolved "https://registry.yarnpkg.com/@typescript-eslint/typescript-estree/-/typescript-estree-8.31.1.tgz#37792fe7ef4d3021c7580067c8f1ae66daabacdf" - integrity sha512-kaA0ueLe2v7KunYOyWYtlf/QhhZb7+qh4Yw6Ni5kgukMIG+iP773tjgBiLWIXYumWCwEq3nLW+TUywEp8uEeag== - dependencies: - "@typescript-eslint/types" "8.31.1" - "@typescript-eslint/visitor-keys" "8.31.1" - debug "^4.3.4" - fast-glob "^3.3.2" - is-glob "^4.0.3" - minimatch "^9.0.4" - semver "^7.6.0" - ts-api-utils "^2.0.1" - -"@typescript-eslint/utils@8.31.1": - version "8.31.1" - resolved 
"https://registry.yarnpkg.com/@typescript-eslint/utils/-/utils-8.31.1.tgz#5628ea0393598a0b2f143d0fc6d019f0dee9dd14" - integrity sha512-2DSI4SNfF5T4oRveQ4nUrSjUqjMND0nLq9rEkz0gfGr3tg0S5KB6DhwR+WZPCjzkZl3cH+4x2ce3EsL50FubjQ== - dependencies: - "@eslint-community/eslint-utils" "^4.4.0" - "@typescript-eslint/scope-manager" "8.31.1" - "@typescript-eslint/types" "8.31.1" - "@typescript-eslint/typescript-estree" "8.31.1" - -"@typescript-eslint/visitor-keys@8.31.1": - version "8.31.1" - resolved "https://registry.yarnpkg.com/@typescript-eslint/visitor-keys/-/visitor-keys-8.31.1.tgz#6742b0e3ba1e0c1e35bdaf78c03e759eb8dd8e75" - integrity sha512-I+/rgqOVBn6f0o7NDTmAPWWC6NuqhV174lfYvAm9fUaWeiefLdux9/YI3/nLugEn9L8fcSi0XmpKi/r5u0nmpw== - dependencies: - "@typescript-eslint/types" "8.31.1" - eslint-visitor-keys "^4.2.0" - -acorn-jsx@^5.3.2: - version "5.3.2" - resolved "https://registry.yarnpkg.com/acorn-jsx/-/acorn-jsx-5.3.2.tgz#7ed5bb55908b3b2f1bc55c6af1653bada7f07937" - integrity sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ== - -acorn-walk@^8.1.1: - version "8.2.0" - resolved "https://registry.yarnpkg.com/acorn-walk/-/acorn-walk-8.2.0.tgz#741210f2e2426454508853a2f44d0ab83b7f69c1" - integrity sha512-k+iyHEuPgSw6SbuDpGQM+06HQUa04DZ3o+F6CSzXMvvI5KMvnaEqXe+YVe555R9nn6GPt404fos4wcgpw12SDA== - -acorn@^8.14.0: - version "8.14.0" - resolved "https://registry.yarnpkg.com/acorn/-/acorn-8.14.0.tgz#063e2c70cac5fb4f6467f0b11152e04c682795b0" - integrity sha512-cl669nCJTZBsL97OF4kUQm5g5hC2uihk0NxY3WENAC0TYdILVkAyHymAntgxGkl7K+t0cXIrH5siy5S4XkFycA== - -acorn@^8.4.1: - version "8.7.0" - resolved "https://registry.yarnpkg.com/acorn/-/acorn-8.7.0.tgz#90951fde0f8f09df93549481e5fc141445b791cf" - integrity sha512-V/LGr1APy+PXIwKebEWrkZPwoeoF+w1jiOBUmuxuiUIaOHtob8Qc9BTrYo7VuI5fR8tqsy+buA2WFooR5olqvQ== - -aggregate-error@^3.0.0: - version "3.1.0" - resolved "https://registry.yarnpkg.com/aggregate-error/-/aggregate-error-3.1.0.tgz#92670ff50f5359bdb7a3e0d40d0ec30c5737687a" - integrity sha512-4I7Td01quW/RpocfNayFdFVk1qSuoh0E7JrbRJ16nH01HhKFQ88INq9Sd+nd72zqRySlr9BmDA8xlEJ6vJMrYA== - dependencies: - clean-stack "^2.0.0" - indent-string "^4.0.0" - -ajv@^6.12.4: - version "6.12.6" - resolved "https://registry.yarnpkg.com/ajv/-/ajv-6.12.6.tgz#baf5a62e802b07d977034586f8c3baf5adf26df4" - integrity sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g== - dependencies: - fast-deep-equal "^3.1.1" - fast-json-stable-stringify "^2.0.0" - json-schema-traverse "^0.4.1" - uri-js "^4.2.2" - -ansi-escapes@^4.2.1: - version "4.3.2" - resolved "https://registry.yarnpkg.com/ansi-escapes/-/ansi-escapes-4.3.2.tgz#6b2291d1db7d98b6521d5f1efa42d0f3a9feb65e" - integrity sha512-gKXj5ALrKWQLsYG9jlTRmR/xKluxHV+Z9QEwNIgCfM1/uwPMCuzVVnh5mwTd+OuBZcwSIMbqssNWRm1lE51QaQ== - dependencies: - type-fest "^0.21.3" - -ansi-escapes@^7.0.0: - version "7.0.0" - resolved "https://registry.yarnpkg.com/ansi-escapes/-/ansi-escapes-7.0.0.tgz#00fc19f491bbb18e1d481b97868204f92109bfe7" - integrity sha512-GdYO7a61mR0fOlAsvC9/rIHf7L96sBc6dEWzeOu+KAea5bZyQRPIpojrVoI4AXGJS/ycu/fBTdLrUkA4ODrvjw== - dependencies: - environment "^1.0.0" - -ansi-regex@^5.0.1: - version "5.0.1" - resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-5.0.1.tgz#082cb2c89c9fe8659a311a53bd6a4dc5301db304" - integrity sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ== - -ansi-regex@^6.1.0: - version "6.1.0" - resolved 
"https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-6.1.0.tgz#95ec409c69619d6cb1b8b34f14b660ef28ebd654" - integrity sha512-7HSX4QQb4CspciLpVFwyRe79O3xsIZDDLER21kERQ71oaPodF8jL725AgJMFAYbooIqolJoRLuM81SpeUkpkvA== - -ansi-styles@^3.2.1: - version "3.2.1" - resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-3.2.1.tgz#41fbb20243e50b12be0f04b8dedbf07520ce841d" - integrity sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA== - dependencies: - color-convert "^1.9.0" - -ansi-styles@^4.0.0, ansi-styles@^4.1.0: - version "4.3.0" - resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-4.3.0.tgz#edd803628ae71c04c85ae7a0906edad34b648937" - integrity sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg== - dependencies: - color-convert "^2.0.1" - -ansi-styles@^5.0.0: - version "5.2.0" - resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-5.2.0.tgz#07449690ad45777d1924ac2abb2fc8895dba836b" - integrity sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA== - -any-promise@^1.0.0: - version "1.3.0" - resolved "https://registry.yarnpkg.com/any-promise/-/any-promise-1.3.0.tgz#abc6afeedcea52e809cdc0376aed3ce39635d17f" - integrity sha512-7UvmKalWRt1wgjL1RrGxoSJW/0QZFIegpeGvZG9kjp8vrRu55XTHbwnqq2GpXm9uLbcuhxm3IqX9OB4MZR1b2A== - -anymatch@^3.0.3: - version "3.1.3" - resolved "https://registry.yarnpkg.com/anymatch/-/anymatch-3.1.3.tgz#790c58b19ba1720a84205b57c618d5ad8524973e" - integrity sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw== - dependencies: - normalize-path "^3.0.0" - picomatch "^2.0.4" - -arg@^4.1.0: - version "4.1.3" - resolved "https://registry.yarnpkg.com/arg/-/arg-4.1.3.tgz#269fc7ad5b8e42cb63c896d5666017261c144089" - integrity sha512-58S9QDqG0Xx27YwPSt9fJxivjYl432YCwfDMfZ+71RAqUrZef7LrKQZ3LHLOwCS4FLNBplP533Zx895SeOCHvA== - -argparse@^1.0.7: - version "1.0.10" - resolved "https://registry.yarnpkg.com/argparse/-/argparse-1.0.10.tgz#bcd6791ea5ae09725e17e5ad988134cd40b3d911" - integrity sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg== - dependencies: - sprintf-js "~1.0.2" - -argparse@^2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/argparse/-/argparse-2.0.1.tgz#246f50f3ca78a3240f6c997e8a9bd1eac49e4b38" - integrity sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q== - -babel-jest@^29.7.0: - version "29.7.0" - resolved "https://registry.yarnpkg.com/babel-jest/-/babel-jest-29.7.0.tgz#f4369919225b684c56085998ac63dbd05be020d5" - integrity sha512-BrvGY3xZSwEcCzKvKsCi2GgHqDqsYkOP4/by5xCgIwGXQxIEh+8ew3gmrE1y7XRR6LHZIj6yLYnUi/mm2KXKBg== - dependencies: - "@jest/transform" "^29.7.0" - "@types/babel__core" "^7.1.14" - babel-plugin-istanbul "^6.1.1" - babel-preset-jest "^29.6.3" - chalk "^4.0.0" - graceful-fs "^4.2.9" - slash "^3.0.0" - -babel-plugin-istanbul@^6.1.1: - version "6.1.1" - resolved "https://registry.yarnpkg.com/babel-plugin-istanbul/-/babel-plugin-istanbul-6.1.1.tgz#fa88ec59232fd9b4e36dbbc540a8ec9a9b47da73" - integrity sha512-Y1IQok9821cC9onCx5otgFfRm7Lm+I+wwxOx738M/WLPZ9Q42m4IG5W0FNX8WLL2gYMZo3JkuXIH2DOpWM+qwA== - dependencies: - "@babel/helper-plugin-utils" "^7.0.0" - "@istanbuljs/load-nyc-config" "^1.0.0" - "@istanbuljs/schema" "^0.1.2" - istanbul-lib-instrument "^5.0.4" - test-exclude "^6.0.0" - -babel-plugin-jest-hoist@^29.6.3: - version "29.6.3" - resolved 
"https://registry.yarnpkg.com/babel-plugin-jest-hoist/-/babel-plugin-jest-hoist-29.6.3.tgz#aadbe943464182a8922c3c927c3067ff40d24626" - integrity sha512-ESAc/RJvGTFEzRwOTT4+lNDk/GNHMkKbNzsvT0qKRfDyyYTskxB5rnU2njIDYVxXCBHHEI1c0YwHob3WaYujOg== - dependencies: - "@babel/template" "^7.3.3" - "@babel/types" "^7.3.3" - "@types/babel__core" "^7.1.14" - "@types/babel__traverse" "^7.0.6" - -babel-preset-current-node-syntax@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/babel-preset-current-node-syntax/-/babel-preset-current-node-syntax-1.0.1.tgz#b4399239b89b2a011f9ddbe3e4f401fc40cff73b" - integrity sha512-M7LQ0bxarkxQoN+vz5aJPsLBn77n8QgTFmo8WK0/44auK2xlCXrYcUxHFxgU7qW5Yzw/CjmLRK2uJzaCd7LvqQ== - dependencies: - "@babel/plugin-syntax-async-generators" "^7.8.4" - "@babel/plugin-syntax-bigint" "^7.8.3" - "@babel/plugin-syntax-class-properties" "^7.8.3" - "@babel/plugin-syntax-import-meta" "^7.8.3" - "@babel/plugin-syntax-json-strings" "^7.8.3" - "@babel/plugin-syntax-logical-assignment-operators" "^7.8.3" - "@babel/plugin-syntax-nullish-coalescing-operator" "^7.8.3" - "@babel/plugin-syntax-numeric-separator" "^7.8.3" - "@babel/plugin-syntax-object-rest-spread" "^7.8.3" - "@babel/plugin-syntax-optional-catch-binding" "^7.8.3" - "@babel/plugin-syntax-optional-chaining" "^7.8.3" - "@babel/plugin-syntax-top-level-await" "^7.8.3" - -babel-preset-jest@^29.6.3: - version "29.6.3" - resolved "https://registry.yarnpkg.com/babel-preset-jest/-/babel-preset-jest-29.6.3.tgz#fa05fa510e7d493896d7b0dd2033601c840f171c" - integrity sha512-0B3bhxR6snWXJZtR/RliHTDPRgn1sNHOR0yVtq/IiQFyuOVjFS+wuio/R4gSNkyYmKmJB4wGZv2NZanmKmTnNA== - dependencies: - babel-plugin-jest-hoist "^29.6.3" - babel-preset-current-node-syntax "^1.0.0" - -balanced-match@^1.0.0: - version "1.0.2" - resolved "https://registry.yarnpkg.com/balanced-match/-/balanced-match-1.0.2.tgz#e83e3a7e3f300b34cb9d87f615fa0cbf357690ee" - integrity sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw== - -brace-expansion@^1.1.7: - version "1.1.11" - resolved "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-1.1.11.tgz#3c7fcbf529d87226f3d2f52b966ff5271eb441dd" - integrity sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA== - dependencies: - balanced-match "^1.0.0" - concat-map "0.0.1" - -brace-expansion@^2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-2.0.1.tgz#1edc459e0f0c548486ecf9fc99f2221364b9a0ae" - integrity sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA== - dependencies: - balanced-match "^1.0.0" - -braces@^3.0.3: - version "3.0.3" - resolved "https://registry.yarnpkg.com/braces/-/braces-3.0.3.tgz#490332f40919452272d55a8480adc0c441358789" - integrity sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA== - dependencies: - fill-range "^7.1.1" - -browserslist@^4.22.2: - version "4.22.2" - resolved "https://registry.yarnpkg.com/browserslist/-/browserslist-4.22.2.tgz#704c4943072bd81ea18997f3bd2180e89c77874b" - integrity sha512-0UgcrvQmBDvZHFGdYUehrCNIazki7/lUP3kkoi/r3YB2amZbFM9J43ZRkJTXBUZK4gmx56+Sqk9+Vs9mwZx9+A== - dependencies: - caniuse-lite "^1.0.30001565" - electron-to-chromium "^1.4.601" - node-releases "^2.0.14" - update-browserslist-db "^1.0.13" - -bs-logger@0.x: - version "0.2.6" - resolved "https://registry.yarnpkg.com/bs-logger/-/bs-logger-0.2.6.tgz#eb7d365307a72cf974cc6cda76b68354ad336bd8" - integrity 
sha512-pd8DCoxmbgc7hyPKOvxtqNcjYoOsABPQdcCUjGp3d42VR2CX1ORhk2A87oqqu5R1kk+76nsxZupkmyd+MVtCog== - dependencies: - fast-json-stable-stringify "2.x" - -bser@2.1.1: - version "2.1.1" - resolved "https://registry.yarnpkg.com/bser/-/bser-2.1.1.tgz#e6787da20ece9d07998533cfd9de6f5c38f4bc05" - integrity sha512-gQxTNE/GAfIIrmHLUE3oJyp5FO6HRBfhjnw4/wMmA63ZGDJnWBmgY/lyQBpnDUkGmAhbSe39tx2d/iTOAfglwQ== - dependencies: - node-int64 "^0.4.0" - -buffer-from@^1.0.0: - version "1.1.2" - resolved "https://registry.yarnpkg.com/buffer-from/-/buffer-from-1.1.2.tgz#2b146a6fd72e80b4f55d255f35ed59a3a9a41bd5" - integrity sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ== - -callsites@^3.0.0: - version "3.1.0" - resolved "https://registry.yarnpkg.com/callsites/-/callsites-3.1.0.tgz#b3630abd8943432f54b3f0519238e33cd7df2f73" - integrity sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ== - -camelcase@^5.3.1: - version "5.3.1" - resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-5.3.1.tgz#e3c9b31569e106811df242f715725a1f4c494320" - integrity sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg== - -camelcase@^6.2.0: - version "6.3.0" - resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-6.3.0.tgz#5685b95eb209ac9c0c177467778c9c84df58ba9a" - integrity sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA== - -caniuse-lite@^1.0.30001565: - version "1.0.30001570" - resolved "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30001570.tgz#b4e5c1fa786f733ab78fc70f592df6b3f23244ca" - integrity sha512-+3e0ASu4sw1SWaoCtvPeyXp+5PsjigkSt8OXZbF9StH5pQWbxEjLAZE3n8Aup5udop1uRiKA7a4utUk/uoSpUw== - -chalk@^2.4.2: - version "2.4.2" - resolved "https://registry.yarnpkg.com/chalk/-/chalk-2.4.2.tgz#cd42541677a54333cf541a49108c1432b44c9424" - integrity sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ== - dependencies: - ansi-styles "^3.2.1" - escape-string-regexp "^1.0.5" - supports-color "^5.3.0" - -chalk@^4.0.0, chalk@^4.1.2: - version "4.1.2" - resolved "https://registry.yarnpkg.com/chalk/-/chalk-4.1.2.tgz#aac4e2b7734a740867aeb16bf02aad556a1e7a01" - integrity sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA== - dependencies: - ansi-styles "^4.1.0" - supports-color "^7.1.0" - -chalk@^5.3.0: - version "5.3.0" - resolved "https://registry.yarnpkg.com/chalk/-/chalk-5.3.0.tgz#67c20a7ebef70e7f3970a01f90fa210cb6860385" - integrity sha512-dLitG79d+GV1Nb/VYcCDFivJeK1hiukt9QjRNVOsUtTy1rR1YJsmpGGTZ3qJos+uw7WmWF4wUwBd9jxjocFC2w== - -char-regex@^1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/char-regex/-/char-regex-1.0.2.tgz#d744358226217f981ed58f479b1d6bcc29545dcf" - integrity sha512-kWWXztvZ5SBQV+eRgKFeh8q5sLuZY2+8WUIzlxWVTg+oGwY14qylx1KbKzHd8P6ZYkAg0xyIDU9JMHhyJMZ1jw== - -ci-info@^3.2.0: - version "3.9.0" - resolved "https://registry.yarnpkg.com/ci-info/-/ci-info-3.9.0.tgz#4279a62028a7b1f262f3473fc9605f5e218c59b4" - integrity sha512-NIxF55hv4nSqQswkAeiOi1r83xy8JldOFDTWiug55KBu9Jnblncd2U6ViHmYgHf01TPZS77NJBhBMKdWj9HQMQ== - -cjs-module-lexer@^1.0.0: - version "1.2.3" - resolved "https://registry.yarnpkg.com/cjs-module-lexer/-/cjs-module-lexer-1.2.3.tgz#6c370ab19f8a3394e318fe682686ec0ac684d107" - integrity sha512-0TNiGstbQmCFwt4akjjBg5pLRTSyj/PkWQ1ZoO2zntmg9yLqSRxwEa4iCfQLGjqhiqBfOJa7W/E8wfGrTDmlZQ== - -cjs-module-lexer@^1.2.3: - version "1.4.1" - 
resolved "https://registry.yarnpkg.com/cjs-module-lexer/-/cjs-module-lexer-1.4.1.tgz#707413784dbb3a72aa11c2f2b042a0bef4004170" - integrity sha512-cuSVIHi9/9E/+821Qjdvngor+xpnlwnuwIyZOaLmHBVdXL+gP+I6QQB9VkO7RI77YIcTV+S1W9AreJ5eN63JBA== - -clean-stack@^2.0.0: - version "2.2.0" - resolved "https://registry.yarnpkg.com/clean-stack/-/clean-stack-2.2.0.tgz#ee8472dbb129e727b31e8a10a427dee9dfe4008b" - integrity sha512-4diC9HaTE+KRAMWhDhrGOECgWZxoevMc5TlkObMqNSsVU62PYzXZ/SMTjzyGAFF1YusgxGcSWTEXBhp0CPwQ1A== - -cli-highlight@^2.1.11: - version "2.1.11" - resolved "https://registry.yarnpkg.com/cli-highlight/-/cli-highlight-2.1.11.tgz#49736fa452f0aaf4fae580e30acb26828d2dc1bf" - integrity sha512-9KDcoEVwyUXrjcJNvHD0NFc/hiwe/WPVYIleQh2O1N2Zro5gWJZ/K+3DGn8w8P/F6FxOgzyC5bxDyHIgCSPhGg== - dependencies: - chalk "^4.0.0" - highlight.js "^10.7.1" - mz "^2.4.0" - parse5 "^5.1.1" - parse5-htmlparser2-tree-adapter "^6.0.0" - yargs "^16.0.0" - -cli-table3@^0.6.3, cli-table3@^0.6.5: - version "0.6.5" - resolved "https://registry.yarnpkg.com/cli-table3/-/cli-table3-0.6.5.tgz#013b91351762739c16a9567c21a04632e449bf2f" - integrity sha512-+W/5efTR7y5HRD7gACw9yQjqMVvEMLBHmboM/kPWam+H+Hmyrgjh6YncVKK122YZkXrLudzTuAukUw9FnMf7IQ== - dependencies: - string-width "^4.2.0" - optionalDependencies: - "@colors/colors" "1.5.0" - -cliui@^7.0.2: - version "7.0.4" - resolved "https://registry.yarnpkg.com/cliui/-/cliui-7.0.4.tgz#a0265ee655476fc807aea9df3df8df7783808b4f" - integrity sha512-OcRE68cOsVMXp1Yvonl/fzkQOyjLSu/8bhPDfQt0e0/Eb283TKP20Fs2MqoPsr9SwA595rRCA+QMzYc9nBP+JQ== - dependencies: - string-width "^4.2.0" - strip-ansi "^6.0.0" - wrap-ansi "^7.0.0" - -cliui@^8.0.1: - version "8.0.1" - resolved "https://registry.yarnpkg.com/cliui/-/cliui-8.0.1.tgz#0c04b075db02cbfe60dc8e6cf2f5486b1a3608aa" - integrity sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ== - dependencies: - string-width "^4.2.0" - strip-ansi "^6.0.1" - wrap-ansi "^7.0.0" - -co@^4.6.0: - version "4.6.0" - resolved "https://registry.yarnpkg.com/co/-/co-4.6.0.tgz#6ea6bdf3d853ae54ccb8e47bfa0bf3f9031fb184" - integrity sha512-QVb0dM5HvG+uaxitm8wONl7jltx8dqhfU33DcqtOZcLSVIKSDDLDi7+0LbAKiyI8hD9u42m2YxXSkMGWThaecQ== - -collect-v8-coverage@^1.0.0: - version "1.0.2" - resolved "https://registry.yarnpkg.com/collect-v8-coverage/-/collect-v8-coverage-1.0.2.tgz#c0b29bcd33bcd0779a1344c2136051e6afd3d9e9" - integrity sha512-lHl4d5/ONEbLlJvaJNtsF/Lz+WvB07u2ycqTYbdrq7UypDXailES4valYb2eWiJFxZlVmpGekfqoxQhzyFdT4Q== - -color-convert@^1.9.0: - version "1.9.3" - resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-1.9.3.tgz#bb71850690e1f136567de629d2d5471deda4c1e8" - integrity sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg== - dependencies: - color-name "1.1.3" - -color-convert@^2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-2.0.1.tgz#72d3a68d598c9bdb3af2ad1e84f21d896abd4de3" - integrity sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ== - dependencies: - color-name "~1.1.4" - -color-name@1.1.3: - version "1.1.3" - resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.3.tgz#a7d0558bd89c42f795dd42328f740831ca53bc25" - integrity sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw== - -color-name@~1.1.4: - version "1.1.4" - resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.4.tgz#c2a09a87acbde69543de6f63fa3995c826c536a2" - 
integrity sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA== - -commander@^10.0.1: - version "10.0.1" - resolved "https://registry.yarnpkg.com/commander/-/commander-10.0.1.tgz#881ee46b4f77d1c1dccc5823433aa39b022cbe06" - integrity sha512-y4Mg2tXshplEbSGzx7amzPwKKOCGuoSRP/CjEdwwk0FOGlUbq6lKuoyDZTNZkmxHdJtp54hdfY/JUrdL7Xfdug== - -concat-map@0.0.1: - version "0.0.1" - resolved "https://registry.yarnpkg.com/concat-map/-/concat-map-0.0.1.tgz#d8a96bd77fd68df7793a73036a3ba0d5405d477b" - integrity sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg== - -convert-source-map@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/convert-source-map/-/convert-source-map-2.0.0.tgz#4b560f649fc4e918dd0ab75cf4961e8bc882d82a" - integrity sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg== - -create-jest@^29.7.0: - version "29.7.0" - resolved "https://registry.yarnpkg.com/create-jest/-/create-jest-29.7.0.tgz#a355c5b3cb1e1af02ba177fe7afd7feee49a5320" - integrity sha512-Adz2bdH0Vq3F53KEMJOoftQFutWCukm6J24wbPWRO4k1kMY7gS7ds/uoJkNuV8wDCtWWnuwGcJwpWcih+zEW1Q== - dependencies: - "@jest/types" "^29.6.3" - chalk "^4.0.0" - exit "^0.1.2" - graceful-fs "^4.2.9" - jest-config "^29.7.0" - jest-util "^29.7.0" - prompts "^2.0.1" - -create-require@^1.1.0: - version "1.1.1" - resolved "https://registry.yarnpkg.com/create-require/-/create-require-1.1.1.tgz#c1d7e8f1e5f6cfc9ff65f9cd352d37348756c333" - integrity sha512-dcKFX3jn0MpIaXjisoRvexIJVEKzaq7z2rZKxf+MSr9TkdmHmsU4m2lcLojrj/FHl8mk5VxMmYA+ftRkP/3oKQ== - -cross-spawn@^7.0.3, cross-spawn@^7.0.6: - version "7.0.6" - resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-7.0.6.tgz#8a58fe78f00dcd70c370451759dfbfaf03e8ee9f" - integrity sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA== - dependencies: - path-key "^3.1.0" - shebang-command "^2.0.0" - which "^2.0.1" - -debug@^4.1.0, debug@^4.1.1, debug@^4.3.1, debug@^4.3.2: - version "4.3.4" - resolved "https://registry.yarnpkg.com/debug/-/debug-4.3.4.tgz#1319f6579357f2338d3337d2cdd4914bb5dcc865" - integrity sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ== - dependencies: - ms "2.1.2" - -debug@^4.3.4, debug@^4.3.7: - version "4.3.7" - resolved "https://registry.yarnpkg.com/debug/-/debug-4.3.7.tgz#87945b4151a011d76d95a198d7111c865c360a52" - integrity sha512-Er2nc/H7RrMXZBFCEim6TCmMk02Z8vLC2Rbi1KEBggpo0fS6l0S1nnapwmIi3yW/+GOJap1Krg4w0Hg80oCqgQ== - dependencies: - ms "^2.1.3" - -dedent@^1.0.0: - version "1.5.1" - resolved "https://registry.yarnpkg.com/dedent/-/dedent-1.5.1.tgz#4f3fc94c8b711e9bb2800d185cd6ad20f2a90aff" - integrity sha512-+LxW+KLWxu3HW3M2w2ympwtqPrqYRzU8fqi6Fhd18fBALe15blJPI/I4+UHveMVG6lJqB4JNd4UG0S5cnVHwIg== - -deep-is@^0.1.3: - version "0.1.4" - resolved "https://registry.yarnpkg.com/deep-is/-/deep-is-0.1.4.tgz#a6f2dce612fadd2ef1f519b73551f17e85199831" - integrity sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ== - -deepmerge@^4.2.2: - version "4.3.1" - resolved "https://registry.yarnpkg.com/deepmerge/-/deepmerge-4.3.1.tgz#44b5f2147cd3b00d4b56137685966f26fd25dd4a" - integrity sha512-3sUqbMEc77XqpdNO7FRyRog+eW3ph+GYCbj+rK+uYyRMuwsVy0rMiVtPn+QJlKFvWP/1PYpapqYn0Me2knFn+A== - -detect-newline@^3.0.0: - version "3.1.0" - resolved 
"https://registry.yarnpkg.com/detect-newline/-/detect-newline-3.1.0.tgz#576f5dfc63ae1a192ff192d8ad3af6308991b651" - integrity sha512-TLz+x/vEXm/Y7P7wn1EJFNLxYpUD4TgMosxY6fAVJUnJMbupHBOncxyWUG9OpTaH9EBD7uFI5LfEgmMOc54DsA== - -diff-sequences@^29.6.3: - version "29.6.3" - resolved "https://registry.yarnpkg.com/diff-sequences/-/diff-sequences-29.6.3.tgz#4deaf894d11407c51efc8418012f9e70b84ea921" - integrity sha512-EjePK1srD3P08o2j4f0ExnylqRs5B9tJjcp9t1krH2qRi8CCdsYfwe9JgSLurFBWwq4uOlipzfk5fHNvwFKr8Q== - -diff@^4.0.1: - version "4.0.2" - resolved "https://registry.yarnpkg.com/diff/-/diff-4.0.2.tgz#60f3aecb89d5fae520c11aa19efc2bb982aade7d" - integrity sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A== - -electron-to-chromium@^1.4.601: - version "1.4.614" - resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.4.614.tgz#2fe789d61fa09cb875569f37c309d0c2701f91c0" - integrity sha512-X4ze/9Sc3QWs6h92yerwqv7aB/uU8vCjZcrMjA8N9R1pjMFRe44dLsck5FzLilOYvcXuDn93B+bpGYyufc70gQ== - -emittery@^0.13.1: - version "0.13.1" - resolved "https://registry.yarnpkg.com/emittery/-/emittery-0.13.1.tgz#c04b8c3457490e0847ae51fced3af52d338e3dad" - integrity sha512-DeWwawk6r5yR9jFgnDKYt4sLS0LmHJJi3ZOnb5/JdbYwj3nW+FxQnHIjhBKz8YLC7oRNPVM9NQ47I3CVx34eqQ== - -emoji-regex@^8.0.0: - version "8.0.0" - resolved "https://registry.yarnpkg.com/emoji-regex/-/emoji-regex-8.0.0.tgz#e818fd69ce5ccfcb404594f842963bf53164cc37" - integrity sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A== - -emojilib@^2.4.0: - version "2.4.0" - resolved "https://registry.yarnpkg.com/emojilib/-/emojilib-2.4.0.tgz#ac518a8bb0d5f76dda57289ccb2fdf9d39ae721e" - integrity sha512-5U0rVMU5Y2n2+ykNLQqMoqklN9ICBT/KsvC1Gz6vqHbz2AXXGkG+Pm5rMWk/8Vjrr/mY9985Hi8DYzn1F09Nyw== - -environment@^1.0.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/environment/-/environment-1.1.0.tgz#8e86c66b180f363c7ab311787e0259665f45a9f1" - integrity sha512-xUtoPkMggbz0MPyPiIWr1Kp4aeWJjDZ6SMvURhimjdZgsRuDplF5/s9hcgGhyXMhs+6vpnuoiZ2kFiu3FMnS8Q== - -error-ex@^1.3.1: - version "1.3.2" - resolved "https://registry.yarnpkg.com/error-ex/-/error-ex-1.3.2.tgz#b4ac40648107fdcdcfae242f428bea8a14d4f1bf" - integrity sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g== - dependencies: - is-arrayish "^0.2.1" - -escalade@^3.1.1: - version "3.1.1" - resolved "https://registry.yarnpkg.com/escalade/-/escalade-3.1.1.tgz#d8cfdc7000965c5a0174b4a82eaa5c0552742e40" - integrity sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw== - -escape-string-regexp@^1.0.5: - version "1.0.5" - resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz#1b61c0562190a8dff6ae3bb2cf0200ca130b86d4" - integrity sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg== - -escape-string-regexp@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-2.0.0.tgz#a30304e99daa32e23b2fd20f51babd07cffca344" - integrity sha512-UpzcLCXolUWcNu5HtVMHYdXJjArjsF9C0aNnquZYY4uW/Vu0miy5YoWvbV345HauVvcAUnpRuhMMcqTcGOY2+w== - -escape-string-regexp@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz#14ba83a5d373e3d311e5afca29cf5bfad965bf34" - integrity sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA== - 
-eslint-plugin-prettier@^5.4.1: - version "5.4.1" - resolved "https://registry.yarnpkg.com/eslint-plugin-prettier/-/eslint-plugin-prettier-5.4.1.tgz#99b55d7dd70047886b2222fdd853665f180b36af" - integrity sha512-9dF+KuU/Ilkq27A8idRP7N2DH8iUR6qXcjF3FR2wETY21PZdBrIjwCau8oboyGj9b7etWmTGEeM8e7oOed6ZWg== - dependencies: - prettier-linter-helpers "^1.0.0" - synckit "^0.11.7" - -eslint-plugin-unused-imports@^4.1.4: - version "4.1.4" - resolved "https://registry.yarnpkg.com/eslint-plugin-unused-imports/-/eslint-plugin-unused-imports-4.1.4.tgz#62ddc7446ccbf9aa7b6f1f0b00a980423cda2738" - integrity sha512-YptD6IzQjDardkl0POxnnRBhU1OEePMV0nd6siHaRBbd+lyh6NAhFEobiznKU7kTsSsDeSD62Pe7kAM1b7dAZQ== - -eslint-scope@^8.2.0: - version "8.2.0" - resolved "https://registry.yarnpkg.com/eslint-scope/-/eslint-scope-8.2.0.tgz#377aa6f1cb5dc7592cfd0b7f892fd0cf352ce442" - integrity sha512-PHlWUfG6lvPc3yvP5A4PNyBL1W8fkDUccmI21JUu/+GKZBoH/W5u6usENXUrWFRsyoW5ACUjFGgAFQp5gUlb/A== - dependencies: - esrecurse "^4.3.0" - estraverse "^5.2.0" - -eslint-visitor-keys@^3.3.0: - version "3.4.3" - resolved "https://registry.yarnpkg.com/eslint-visitor-keys/-/eslint-visitor-keys-3.4.3.tgz#0cd72fe8550e3c2eae156a96a4dddcd1c8ac5800" - integrity sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag== - -eslint-visitor-keys@^4.2.0: - version "4.2.0" - resolved "https://registry.yarnpkg.com/eslint-visitor-keys/-/eslint-visitor-keys-4.2.0.tgz#687bacb2af884fcdda8a6e7d65c606f46a14cd45" - integrity sha512-UyLnSehNt62FFhSwjZlHmeokpRK59rcz29j+F1/aDgbkbRTk7wIc9XzdoasMUbRNKDM0qQt/+BJ4BrpFeABemw== - -eslint@^9.20.1: - version "9.20.1" - resolved "https://registry.yarnpkg.com/eslint/-/eslint-9.20.1.tgz#923924c078f5226832449bac86662dd7e53c91d6" - integrity sha512-m1mM33o6dBUjxl2qb6wv6nGNwCAsns1eKtaQ4l/NPHeTvhiUPbtdfMyktxN4B3fgHIgsYh1VT3V9txblpQHq+g== - dependencies: - "@eslint-community/eslint-utils" "^4.2.0" - "@eslint-community/regexpp" "^4.12.1" - "@eslint/config-array" "^0.19.0" - "@eslint/core" "^0.11.0" - "@eslint/eslintrc" "^3.2.0" - "@eslint/js" "9.20.0" - "@eslint/plugin-kit" "^0.2.5" - "@humanfs/node" "^0.16.6" - "@humanwhocodes/module-importer" "^1.0.1" - "@humanwhocodes/retry" "^0.4.1" - "@types/estree" "^1.0.6" - "@types/json-schema" "^7.0.15" - ajv "^6.12.4" - chalk "^4.0.0" - cross-spawn "^7.0.6" - debug "^4.3.2" - escape-string-regexp "^4.0.0" - eslint-scope "^8.2.0" - eslint-visitor-keys "^4.2.0" - espree "^10.3.0" - esquery "^1.5.0" - esutils "^2.0.2" - fast-deep-equal "^3.1.3" - file-entry-cache "^8.0.0" - find-up "^5.0.0" - glob-parent "^6.0.2" - ignore "^5.2.0" - imurmurhash "^0.1.4" - is-glob "^4.0.0" - json-stable-stringify-without-jsonify "^1.0.1" - lodash.merge "^4.6.2" - minimatch "^3.1.2" - natural-compare "^1.4.0" - optionator "^0.9.3" - -espree@^10.0.1, espree@^10.3.0: - version "10.3.0" - resolved "https://registry.yarnpkg.com/espree/-/espree-10.3.0.tgz#29267cf5b0cb98735b65e64ba07e0ed49d1eed8a" - integrity sha512-0QYC8b24HWY8zjRnDTL6RiHfDbAWn63qb4LMj1Z4b076A4une81+z03Kg7l7mn/48PUTqoLptSXez8oknU8Clg== - dependencies: - acorn "^8.14.0" - acorn-jsx "^5.3.2" - eslint-visitor-keys "^4.2.0" - -esprima@^4.0.0: - version "4.0.1" - resolved "https://registry.yarnpkg.com/esprima/-/esprima-4.0.1.tgz#13b04cdb3e6c5d19df91ab6987a8695619b0aa71" - integrity sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A== - -esquery@^1.5.0: - version "1.6.0" - resolved "https://registry.yarnpkg.com/esquery/-/esquery-1.6.0.tgz#91419234f804d852a82dceec3e16cdc22cf9dae7" - 
integrity sha512-ca9pw9fomFcKPvFLXhBKUK90ZvGibiGOvRJNbjljY7s7uq/5YO4BOzcYtJqExdx99rF6aAcnRxHmcUHcz6sQsg== - dependencies: - estraverse "^5.1.0" - -esrecurse@^4.3.0: - version "4.3.0" - resolved "https://registry.yarnpkg.com/esrecurse/-/esrecurse-4.3.0.tgz#7ad7964d679abb28bee72cec63758b1c5d2c9921" - integrity sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag== - dependencies: - estraverse "^5.2.0" - -estraverse@^5.1.0, estraverse@^5.2.0: - version "5.3.0" - resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-5.3.0.tgz#2eea5290702f26ab8fe5370370ff86c965d21123" - integrity sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA== - -esutils@^2.0.2: - version "2.0.3" - resolved "https://registry.yarnpkg.com/esutils/-/esutils-2.0.3.tgz#74d2eb4de0b8da1293711910d50775b9b710ef64" - integrity sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g== - -execa@^5.0.0: - version "5.1.1" - resolved "https://registry.yarnpkg.com/execa/-/execa-5.1.1.tgz#f80ad9cbf4298f7bd1d4c9555c21e93741c411dd" - integrity sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg== - dependencies: - cross-spawn "^7.0.3" - get-stream "^6.0.0" - human-signals "^2.1.0" - is-stream "^2.0.0" - merge-stream "^2.0.0" - npm-run-path "^4.0.1" - onetime "^5.1.2" - signal-exit "^3.0.3" - strip-final-newline "^2.0.0" - -exit@^0.1.2: - version "0.1.2" - resolved "https://registry.yarnpkg.com/exit/-/exit-0.1.2.tgz#0632638f8d877cc82107d30a0fff1a17cba1cd0c" - integrity sha512-Zk/eNKV2zbjpKzrsQ+n1G6poVbErQxJ0LBOJXaKZ1EViLzH+hrLu9cdXI4zw9dBQJslwBEpbQ2P1oS7nDxs6jQ== - -expect@^29.0.0, expect@^29.7.0: - version "29.7.0" - resolved "https://registry.yarnpkg.com/expect/-/expect-29.7.0.tgz#578874590dcb3214514084c08115d8aee61e11bc" - integrity sha512-2Zks0hf1VLFYI1kbh0I5jP3KHHyCHpkfyHBzsSXRFgl/Bg9mWYfMW8oD+PdMPlEwy5HNsR9JutYy6pMeOh61nw== - dependencies: - "@jest/expect-utils" "^29.7.0" - jest-get-type "^29.6.3" - jest-matcher-utils "^29.7.0" - jest-message-util "^29.7.0" - jest-util "^29.7.0" - -fast-deep-equal@^3.1.1, fast-deep-equal@^3.1.3: - version "3.1.3" - resolved "https://registry.yarnpkg.com/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz#3a7d56b559d6cbc3eb512325244e619a65c6c525" - integrity sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q== - -fast-diff@^1.1.2: - version "1.3.0" - resolved "https://registry.yarnpkg.com/fast-diff/-/fast-diff-1.3.0.tgz#ece407fa550a64d638536cd727e129c61616e0f0" - integrity sha512-VxPP4NqbUjj6MaAOafWeUn2cXWLcCtljklUtZf0Ind4XQ+QPtmA0b18zZy0jIQx+ExRVCR/ZQpBmik5lXshNsw== - -fast-glob@^3.3.2: - version "3.3.2" - resolved "https://registry.yarnpkg.com/fast-glob/-/fast-glob-3.3.2.tgz#a904501e57cfdd2ffcded45e99a54fef55e46129" - integrity sha512-oX2ruAFQwf/Orj8m737Y5adxDQO0LAB7/S5MnxCdTNDd4p6BsyIVsv9JQsATbTSq8KHRpLwIHbVlUNatxd+1Ow== - dependencies: - "@nodelib/fs.stat" "^2.0.2" - "@nodelib/fs.walk" "^1.2.3" - glob-parent "^5.1.2" - merge2 "^1.3.0" - micromatch "^4.0.4" - -fast-json-stable-stringify@2.x, fast-json-stable-stringify@^2.0.0, fast-json-stable-stringify@^2.1.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz#874bf69c6f404c2b5d99c481341399fd55892633" - integrity sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw== - -fast-levenshtein@^2.0.6: - version "2.0.6" - resolved 
"https://registry.yarnpkg.com/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz#3d8a5c66883a16a30ca8643e851f19baa7797917" - integrity sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw== - -fastq@^1.6.0: - version "1.17.1" - resolved "https://registry.yarnpkg.com/fastq/-/fastq-1.17.1.tgz#2a523f07a4e7b1e81a42b91b8bf2254107753b47" - integrity sha512-sRVD3lWVIXWg6By68ZN7vho9a1pQcN/WBFaAAsDDFzlJjvoGx0P8z7V1t72grFJfJhu3YPZBuu25f7Kaw2jN1w== - dependencies: - reusify "^1.0.4" - -fb-watchman@^2.0.0: - version "2.0.2" - resolved "https://registry.yarnpkg.com/fb-watchman/-/fb-watchman-2.0.2.tgz#e9524ee6b5c77e9e5001af0f85f3adbb8623255c" - integrity sha512-p5161BqbuCaSnB8jIbzQHOlpgsPmK5rJVDfDKO91Axs5NC1uu3HRQm6wt9cd9/+GtQQIO53JdGXXoyDpTAsgYA== - dependencies: - bser "2.1.1" - -fflate@^0.8.2: - version "0.8.2" - resolved "https://registry.yarnpkg.com/fflate/-/fflate-0.8.2.tgz#fc8631f5347812ad6028bbe4a2308b2792aa1dea" - integrity sha512-cPJU47OaAoCbg0pBvzsgpTPhmhqI5eJjh/JIu8tPj5q+T7iLvW/JAYUqmE7KOB4R1ZyEhzBaIQpQpardBF5z8A== - -file-entry-cache@^8.0.0: - version "8.0.0" - resolved "https://registry.yarnpkg.com/file-entry-cache/-/file-entry-cache-8.0.0.tgz#7787bddcf1131bffb92636c69457bbc0edd6d81f" - integrity sha512-XXTUwCvisa5oacNGRP9SfNtYBNAMi+RPwBFmblZEF7N7swHYQS6/Zfk7SRwx4D5j3CH211YNRco1DEMNVfZCnQ== - dependencies: - flat-cache "^4.0.0" - -fill-range@^7.1.1: - version "7.1.1" - resolved "https://registry.yarnpkg.com/fill-range/-/fill-range-7.1.1.tgz#44265d3cac07e3ea7dc247516380643754a05292" - integrity sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg== - dependencies: - to-regex-range "^5.0.1" - -find-up@^4.0.0, find-up@^4.1.0: - version "4.1.0" - resolved "https://registry.yarnpkg.com/find-up/-/find-up-4.1.0.tgz#97afe7d6cdc0bc5928584b7c8d7b16e8a9aa5d19" - integrity sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw== - dependencies: - locate-path "^5.0.0" - path-exists "^4.0.0" - -find-up@^5.0.0: - version "5.0.0" - resolved "https://registry.yarnpkg.com/find-up/-/find-up-5.0.0.tgz#4c92819ecb7083561e4f4a240a86be5198f536fc" - integrity sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng== - dependencies: - locate-path "^6.0.0" - path-exists "^4.0.0" - -flat-cache@^4.0.0: - version "4.0.1" - resolved "https://registry.yarnpkg.com/flat-cache/-/flat-cache-4.0.1.tgz#0ece39fcb14ee012f4b0410bd33dd9c1f011127c" - integrity sha512-f7ccFPK3SXFHpx15UIGyRJ/FJQctuKZ0zVuN3frBo4HnK3cay9VEW0R6yPYFHC0AgqhukPzKjq22t5DmAyqGyw== - dependencies: - flatted "^3.2.9" - keyv "^4.5.4" - -flatted@^3.2.9: - version "3.3.2" - resolved "https://registry.yarnpkg.com/flatted/-/flatted-3.3.2.tgz#adba1448a9841bec72b42c532ea23dbbedef1a27" - integrity sha512-AiwGJM8YcNOaobumgtng+6NHuOqC3A7MixFeDafM3X9cIUM+xUXoS5Vfgf+OihAYe20fxqNM9yPBXJzRtZ/4eA== - -fs.realpath@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/fs.realpath/-/fs.realpath-1.0.0.tgz#1504ad2523158caa40db4a2787cb01411994ea4f" - integrity sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw== - -fsevents@^2.3.2: - version "2.3.3" - resolved "https://registry.yarnpkg.com/fsevents/-/fsevents-2.3.3.tgz#cac6407785d03675a2a5e1a5305c697b347d90d6" - integrity sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw== - -function-bind@^1.1.2: - version "1.1.2" - resolved 
"https://registry.yarnpkg.com/function-bind/-/function-bind-1.1.2.tgz#2c02d864d97f3ea6c8830c464cbd11ab6eab7a1c" - integrity sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA== - -gensync@^1.0.0-beta.2: - version "1.0.0-beta.2" - resolved "https://registry.yarnpkg.com/gensync/-/gensync-1.0.0-beta.2.tgz#32a6ee76c3d7f52d46b2b1ae5d93fea8580a25e0" - integrity sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg== - -get-caller-file@^2.0.5: - version "2.0.5" - resolved "https://registry.yarnpkg.com/get-caller-file/-/get-caller-file-2.0.5.tgz#4f94412a82db32f36e3b0b9741f8a97feb031f7e" - integrity sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg== - -get-package-type@^0.1.0: - version "0.1.0" - resolved "https://registry.yarnpkg.com/get-package-type/-/get-package-type-0.1.0.tgz#8de2d803cff44df3bc6c456e6668b36c3926e11a" - integrity sha512-pjzuKtY64GYfWizNAJ0fr9VqttZkNiK2iS430LtIHzjBEr6bX8Am2zm4sW4Ro5wjWW5cAlRL1qAMTcXbjNAO2Q== - -get-stdin@^8.0.0: - version "8.0.0" - resolved "https://registry.yarnpkg.com/get-stdin/-/get-stdin-8.0.0.tgz#cbad6a73feb75f6eeb22ba9e01f89aa28aa97a53" - integrity sha512-sY22aA6xchAzprjyqmSEQv4UbAAzRN0L2dQB0NlN5acTTK9Don6nhoc3eAbUnpZiCANAMfd/+40kVdKfFygohg== - -get-stream@^6.0.0: - version "6.0.1" - resolved "https://registry.yarnpkg.com/get-stream/-/get-stream-6.0.1.tgz#a262d8eef67aced57c2852ad6167526a43cbf7b7" - integrity sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg== - -glob-parent@^5.1.2: - version "5.1.2" - resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-5.1.2.tgz#869832c58034fe68a4093c17dc15e8340d8401c4" - integrity sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow== - dependencies: - is-glob "^4.0.1" - -glob-parent@^6.0.2: - version "6.0.2" - resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-6.0.2.tgz#6d237d99083950c79290f24c7642a3de9a28f9e3" - integrity sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A== - dependencies: - is-glob "^4.0.3" - -glob@^7.1.3, glob@^7.1.4: - version "7.2.3" - resolved "https://registry.yarnpkg.com/glob/-/glob-7.2.3.tgz#b8df0fb802bbfa8e89bd1d938b4e16578ed44f2b" - integrity sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q== - dependencies: - fs.realpath "^1.0.0" - inflight "^1.0.4" - inherits "2" - minimatch "^3.1.1" - once "^1.3.0" - path-is-absolute "^1.0.0" - -glob@^8.0.1: - version "8.1.0" - resolved "https://registry.yarnpkg.com/glob/-/glob-8.1.0.tgz#d388f656593ef708ee3e34640fdfb99a9fd1c33e" - integrity sha512-r8hpEjiQEYlF2QU0df3dS+nxxSIreXQS1qRhMJM0Q5NDdR386C7jb7Hwwod8Fgiuex+k0GFjgft18yvxm5XoCQ== - dependencies: - fs.realpath "^1.0.0" - inflight "^1.0.4" - inherits "2" - minimatch "^5.0.1" - once "^1.3.0" - -globals@^11.1.0: - version "11.12.0" - resolved "https://registry.yarnpkg.com/globals/-/globals-11.12.0.tgz#ab8795338868a0babd8525758018c2a7eb95c42e" - integrity sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA== - -globals@^14.0.0: - version "14.0.0" - resolved "https://registry.yarnpkg.com/globals/-/globals-14.0.0.tgz#898d7413c29babcf6bafe56fcadded858ada724e" - integrity sha512-oahGvuMGQlPw/ivIYBjVSrWAfWLBeku5tpPE2fOPLi+WHffIWbuh2tCjhyQhTBPMf5E9jDEH4FOmTYgYwbKwtQ== - -graceful-fs@^4.2.9: - version "4.2.11" - resolved 
"https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.11.tgz#4183e4e8bf08bb6e05bbb2f7d2e0c8f712ca40e3" - integrity sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ== - -graphemer@^1.4.0: - version "1.4.0" - resolved "https://registry.yarnpkg.com/graphemer/-/graphemer-1.4.0.tgz#fb2f1d55e0e3a1849aeffc90c4fa0dd53a0e66c6" - integrity sha512-EtKwoO6kxCL9WO5xipiHTZlSzBm7WLT627TqC/uVRd0HKmq8NXyebnNYxDoBi7wt8eTWrUrKXCOVaFq9x1kgag== - -has-flag@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-3.0.0.tgz#b5d454dc2199ae225699f3467e5a07f3b955bafd" - integrity sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw== - -has-flag@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-4.0.0.tgz#944771fd9c81c81265c4d6941860da06bb59479b" - integrity sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ== - -hasown@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/hasown/-/hasown-2.0.0.tgz#f4c513d454a57b7c7e1650778de226b11700546c" - integrity sha512-vUptKVTpIJhcczKBbgnS+RtcuYMB8+oNzPK2/Hp3hanz8JmpATdmmgLgSaadVREkDm+e2giHwY3ZRkyjSIDDFA== - dependencies: - function-bind "^1.1.2" - -highlight.js@^10.7.1: - version "10.7.3" - resolved "https://registry.yarnpkg.com/highlight.js/-/highlight.js-10.7.3.tgz#697272e3991356e40c3cac566a74eef681756531" - integrity sha512-tzcUFauisWKNHaRkN4Wjl/ZA07gENAjFl3J/c480dprkGTg5EQstgaNFqBfUqCq54kZRIEcreTsAgF/m2quD7A== - -html-escaper@^2.0.0: - version "2.0.2" - resolved "https://registry.yarnpkg.com/html-escaper/-/html-escaper-2.0.2.tgz#dfd60027da36a36dfcbe236262c00a5822681453" - integrity sha512-H2iMtd0I4Mt5eYiapRdIDjp+XzelXQ0tFE4JS7YFwFevXXMmOp9myNrUvCg0D6ws8iqkRPBfKHgbwig1SmlLfg== - -human-signals@^2.1.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/human-signals/-/human-signals-2.1.0.tgz#dc91fcba42e4d06e4abaed33b3e7a3c02f514ea0" - integrity sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw== - -iconv-lite@^0.6.3: - version "0.6.3" - resolved "https://registry.yarnpkg.com/iconv-lite/-/iconv-lite-0.6.3.tgz#a52f80bf38da1952eb5c681790719871a1a72501" - integrity sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw== - dependencies: - safer-buffer ">= 2.1.2 < 3.0.0" - -ignore-walk@^5.0.1: - version "5.0.1" - resolved "https://registry.yarnpkg.com/ignore-walk/-/ignore-walk-5.0.1.tgz#5f199e23e1288f518d90358d461387788a154776" - integrity sha512-yemi4pMf51WKT7khInJqAvsIGzoqYXblnsz0ql8tM+yi1EKYTY1evX4NAbJrLL/Aanr2HyZeluqU+Oi7MGHokw== - dependencies: - minimatch "^5.0.1" - -ignore@^5.2.0, ignore@^5.3.1: - version "5.3.2" - resolved "https://registry.yarnpkg.com/ignore/-/ignore-5.3.2.tgz#3cd40e729f3643fd87cb04e50bf0eb722bc596f5" - integrity sha512-hsBTNUqQTDwkWtcdYI2i06Y/nUBEsNEDJKjWdigLvegy8kDuJAS8uRlpkkcQpyEXL0Z/pjDy5HBmMjRCJ2gq+g== - -import-fresh@^3.2.1: - version "3.3.0" - resolved "https://registry.yarnpkg.com/import-fresh/-/import-fresh-3.3.0.tgz#37162c25fcb9ebaa2e6e53d5b4d88ce17d9e0c2b" - integrity sha512-veYYhQa+D1QBKznvhUHxb8faxlrwUnxseDAbAp457E0wLNio2bOSKnjYDhMj+YiAq61xrMGhQk9iXVk5FzgQMw== - dependencies: - parent-module "^1.0.0" - resolve-from "^4.0.0" - -import-local@^3.0.2: - version "3.1.0" - resolved "https://registry.yarnpkg.com/import-local/-/import-local-3.1.0.tgz#b4479df8a5fd44f6cdce24070675676063c95cb4" - integrity 
sha512-ASB07uLtnDs1o6EHjKpX34BKYDSqnFerfTOJL2HvMqF70LnxpjkzDB8J44oT9pu4AMPkQwf8jl6szgvNd2tRIg== - dependencies: - pkg-dir "^4.2.0" - resolve-cwd "^3.0.0" - -imurmurhash@^0.1.4: - version "0.1.4" - resolved "https://registry.yarnpkg.com/imurmurhash/-/imurmurhash-0.1.4.tgz#9218b9b2b928a238b13dc4fb6b6d576f231453ea" - integrity sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA== - -indent-string@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/indent-string/-/indent-string-4.0.0.tgz#624f8f4497d619b2d9768531d58f4122854d7251" - integrity sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg== - -inflight@^1.0.4: - version "1.0.6" - resolved "https://registry.yarnpkg.com/inflight/-/inflight-1.0.6.tgz#49bd6331d7d02d0c09bc910a1075ba8165b56df9" - integrity sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA== - dependencies: - once "^1.3.0" - wrappy "1" - -inherits@2, inherits@^2.0.3: - version "2.0.4" - resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c" - integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ== - -is-arrayish@^0.2.1: - version "0.2.1" - resolved "https://registry.yarnpkg.com/is-arrayish/-/is-arrayish-0.2.1.tgz#77c99840527aa8ecb1a8ba697b80645a7a926a9d" - integrity sha512-zz06S8t0ozoDXMG+ube26zeCTNXcKIPJZJi8hBrF4idCLms4CG9QtK7qBl1boi5ODzFpjswb5JPmHCbMpjaYzg== - -is-core-module@^2.13.0: - version "2.13.1" - resolved "https://registry.yarnpkg.com/is-core-module/-/is-core-module-2.13.1.tgz#ad0d7532c6fea9da1ebdc82742d74525c6273384" - integrity sha512-hHrIjvZsftOsvKSn2TRYl63zvxsgE0K+0mYMoH6gD4omR5IWB2KynivBQczo3+wF1cCkjzvptnI9Q0sPU66ilw== - dependencies: - hasown "^2.0.0" - -is-extglob@^2.1.1: - version "2.1.1" - resolved "https://registry.yarnpkg.com/is-extglob/-/is-extglob-2.1.1.tgz#a88c02535791f02ed37c76a1b9ea9773c833f8c2" - integrity sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ== - -is-fullwidth-code-point@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz#f116f8064fe90b3f7844a38997c0b75051269f1d" - integrity sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg== - -is-generator-fn@^2.0.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/is-generator-fn/-/is-generator-fn-2.1.0.tgz#7d140adc389aaf3011a8f2a2a4cfa6faadffb118" - integrity sha512-cTIB4yPYL/Grw0EaSzASzg6bBy9gqCofvWN8okThAYIxKJZC+udlRAmGbM0XLeniEJSs8uEgHPGuHSe1XsOLSQ== - -is-glob@^4.0.0, is-glob@^4.0.1, is-glob@^4.0.3: - version "4.0.3" - resolved "https://registry.yarnpkg.com/is-glob/-/is-glob-4.0.3.tgz#64f61e42cbbb2eec2071a9dac0b28ba1e65d5084" - integrity sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg== - dependencies: - is-extglob "^2.1.1" - -is-number@^7.0.0: - version "7.0.0" - resolved "https://registry.yarnpkg.com/is-number/-/is-number-7.0.0.tgz#7535345b896734d5f80c4d06c50955527a14f12b" - integrity sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng== - -is-stream@^2.0.0: - version "2.0.1" - resolved "https://registry.yarnpkg.com/is-stream/-/is-stream-2.0.1.tgz#fac1e3d53b97ad5a9d0ae9cef2389f5810a5c077" - integrity sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg== - 
-isexe@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/isexe/-/isexe-2.0.0.tgz#e8fbf374dc556ff8947a10dcb0572d633f2cfa10" - integrity sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw== - -istanbul-lib-coverage@^3.0.0, istanbul-lib-coverage@^3.2.0: - version "3.2.2" - resolved "https://registry.yarnpkg.com/istanbul-lib-coverage/-/istanbul-lib-coverage-3.2.2.tgz#2d166c4b0644d43a39f04bf6c2edd1e585f31756" - integrity sha512-O8dpsF+r0WV/8MNRKfnmrtCWhuKjxrq2w+jpzBL5UZKTi2LeVWnWOmWRxFlesJONmc+wLAGvKQZEOanko0LFTg== - -istanbul-lib-instrument@^5.0.4: - version "5.2.1" - resolved "https://registry.yarnpkg.com/istanbul-lib-instrument/-/istanbul-lib-instrument-5.2.1.tgz#d10c8885c2125574e1c231cacadf955675e1ce3d" - integrity sha512-pzqtp31nLv/XFOzXGuvhCb8qhjmTVo5vjVk19XE4CRlSWz0KoeJ3bw9XsA7nOp9YBf4qHjwBxkDzKcME/J29Yg== - dependencies: - "@babel/core" "^7.12.3" - "@babel/parser" "^7.14.7" - "@istanbuljs/schema" "^0.1.2" - istanbul-lib-coverage "^3.2.0" - semver "^6.3.0" - -istanbul-lib-instrument@^6.0.0: - version "6.0.1" - resolved "https://registry.yarnpkg.com/istanbul-lib-instrument/-/istanbul-lib-instrument-6.0.1.tgz#71e87707e8041428732518c6fb5211761753fbdf" - integrity sha512-EAMEJBsYuyyztxMxW3g7ugGPkrZsV57v0Hmv3mm1uQsmB+QnZuepg731CRaIgeUVSdmsTngOkSnauNF8p7FIhA== - dependencies: - "@babel/core" "^7.12.3" - "@babel/parser" "^7.14.7" - "@istanbuljs/schema" "^0.1.2" - istanbul-lib-coverage "^3.2.0" - semver "^7.5.4" - -istanbul-lib-report@^3.0.0: - version "3.0.1" - resolved "https://registry.yarnpkg.com/istanbul-lib-report/-/istanbul-lib-report-3.0.1.tgz#908305bac9a5bd175ac6a74489eafd0fc2445a7d" - integrity sha512-GCfE1mtsHGOELCU8e/Z7YWzpmybrx/+dSTfLrvY8qRmaY6zXTKWn6WQIjaAFw069icm6GVMNkgu0NzI4iPZUNw== - dependencies: - istanbul-lib-coverage "^3.0.0" - make-dir "^4.0.0" - supports-color "^7.1.0" - -istanbul-lib-source-maps@^4.0.0: - version "4.0.1" - resolved "https://registry.yarnpkg.com/istanbul-lib-source-maps/-/istanbul-lib-source-maps-4.0.1.tgz#895f3a709fcfba34c6de5a42939022f3e4358551" - integrity sha512-n3s8EwkdFIJCG3BPKBYvskgXGoy88ARzvegkitk60NxRdwltLOTaH7CUiMRXvwYorl0Q712iEjcWB+fK/MrWVw== - dependencies: - debug "^4.1.1" - istanbul-lib-coverage "^3.0.0" - source-map "^0.6.1" - -istanbul-reports@^3.1.3: - version "3.1.6" - resolved "https://registry.yarnpkg.com/istanbul-reports/-/istanbul-reports-3.1.6.tgz#2544bcab4768154281a2f0870471902704ccaa1a" - integrity sha512-TLgnMkKg3iTDsQ9PbPTdpfAK2DzjF9mqUG7RMgcQl8oFjad8ob4laGxv5XV5U9MAfx8D6tSJiUyuAwzLicaxlg== - dependencies: - html-escaper "^2.0.0" - istanbul-lib-report "^3.0.0" - -jest-changed-files@^29.7.0: - version "29.7.0" - resolved "https://registry.yarnpkg.com/jest-changed-files/-/jest-changed-files-29.7.0.tgz#1c06d07e77c78e1585d020424dedc10d6e17ac3a" - integrity sha512-fEArFiwf1BpQ+4bXSprcDc3/x4HSzL4al2tozwVpDFpsxALjLYdyiIK4e5Vz66GQJIbXJ82+35PtysofptNX2w== - dependencies: - execa "^5.0.0" - jest-util "^29.7.0" - p-limit "^3.1.0" - -jest-circus@^29.7.0: - version "29.7.0" - resolved "https://registry.yarnpkg.com/jest-circus/-/jest-circus-29.7.0.tgz#b6817a45fcc835d8b16d5962d0c026473ee3668a" - integrity sha512-3E1nCMgipcTkCocFwM90XXQab9bS+GMsjdpmPrlelaxwD93Ad8iVEjX/vvHPdLPnFf+L40u+5+iutRdA1N9myw== - dependencies: - "@jest/environment" "^29.7.0" - "@jest/expect" "^29.7.0" - "@jest/test-result" "^29.7.0" - "@jest/types" "^29.6.3" - "@types/node" "*" - chalk "^4.0.0" - co "^4.6.0" - dedent "^1.0.0" - is-generator-fn "^2.0.0" - jest-each "^29.7.0" - jest-matcher-utils "^29.7.0" - 
jest-message-util "^29.7.0" - jest-runtime "^29.7.0" - jest-snapshot "^29.7.0" - jest-util "^29.7.0" - p-limit "^3.1.0" - pretty-format "^29.7.0" - pure-rand "^6.0.0" - slash "^3.0.0" - stack-utils "^2.0.3" - -jest-cli@^29.7.0: - version "29.7.0" - resolved "https://registry.yarnpkg.com/jest-cli/-/jest-cli-29.7.0.tgz#5592c940798e0cae677eec169264f2d839a37995" - integrity sha512-OVVobw2IubN/GSYsxETi+gOe7Ka59EFMR/twOU3Jb2GnKKeMGJB5SGUUrEz3SFVmJASUdZUzy83sLNNQ2gZslg== - dependencies: - "@jest/core" "^29.7.0" - "@jest/test-result" "^29.7.0" - "@jest/types" "^29.6.3" - chalk "^4.0.0" - create-jest "^29.7.0" - exit "^0.1.2" - import-local "^3.0.2" - jest-config "^29.7.0" - jest-util "^29.7.0" - jest-validate "^29.7.0" - yargs "^17.3.1" - -jest-config@^29.7.0: - version "29.7.0" - resolved "https://registry.yarnpkg.com/jest-config/-/jest-config-29.7.0.tgz#bcbda8806dbcc01b1e316a46bb74085a84b0245f" - integrity sha512-uXbpfeQ7R6TZBqI3/TxCU4q4ttk3u0PJeC+E0zbfSoSjq6bJ7buBPxzQPL0ifrkY4DNu4JUdk0ImlBUYi840eQ== - dependencies: - "@babel/core" "^7.11.6" - "@jest/test-sequencer" "^29.7.0" - "@jest/types" "^29.6.3" - babel-jest "^29.7.0" - chalk "^4.0.0" - ci-info "^3.2.0" - deepmerge "^4.2.2" - glob "^7.1.3" - graceful-fs "^4.2.9" - jest-circus "^29.7.0" - jest-environment-node "^29.7.0" - jest-get-type "^29.6.3" - jest-regex-util "^29.6.3" - jest-resolve "^29.7.0" - jest-runner "^29.7.0" - jest-util "^29.7.0" - jest-validate "^29.7.0" - micromatch "^4.0.4" - parse-json "^5.2.0" - pretty-format "^29.7.0" - slash "^3.0.0" - strip-json-comments "^3.1.1" - -jest-diff@^29.7.0: - version "29.7.0" - resolved "https://registry.yarnpkg.com/jest-diff/-/jest-diff-29.7.0.tgz#017934a66ebb7ecf6f205e84699be10afd70458a" - integrity sha512-LMIgiIrhigmPrs03JHpxUh2yISK3vLFPkAodPeo0+BuF7wA2FoQbkEg1u8gBYBThncu7e1oEDUfIXVuTqLRUjw== - dependencies: - chalk "^4.0.0" - diff-sequences "^29.6.3" - jest-get-type "^29.6.3" - pretty-format "^29.7.0" - -jest-docblock@^29.7.0: - version "29.7.0" - resolved "https://registry.yarnpkg.com/jest-docblock/-/jest-docblock-29.7.0.tgz#8fddb6adc3cdc955c93e2a87f61cfd350d5d119a" - integrity sha512-q617Auw3A612guyaFgsbFeYpNP5t2aoUNLwBUbc/0kD1R4t9ixDbyFTHd1nok4epoVFpr7PmeWHrhvuV3XaJ4g== - dependencies: - detect-newline "^3.0.0" - -jest-each@^29.7.0: - version "29.7.0" - resolved "https://registry.yarnpkg.com/jest-each/-/jest-each-29.7.0.tgz#162a9b3f2328bdd991beaabffbb74745e56577d1" - integrity sha512-gns+Er14+ZrEoC5fhOfYCY1LOHHr0TI+rQUHZS8Ttw2l7gl+80eHc/gFf2Ktkw0+SIACDTeWvpFcv3B04VembQ== - dependencies: - "@jest/types" "^29.6.3" - chalk "^4.0.0" - jest-get-type "^29.6.3" - jest-util "^29.7.0" - pretty-format "^29.7.0" - -jest-environment-node@^29.7.0: - version "29.7.0" - resolved "https://registry.yarnpkg.com/jest-environment-node/-/jest-environment-node-29.7.0.tgz#0b93e111dda8ec120bc8300e6d1fb9576e164376" - integrity sha512-DOSwCRqXirTOyheM+4d5YZOrWcdu0LNZ87ewUoywbcb2XR4wKgqiG8vNeYwhjFMbEkfju7wx2GYH0P2gevGvFw== - dependencies: - "@jest/environment" "^29.7.0" - "@jest/fake-timers" "^29.7.0" - "@jest/types" "^29.6.3" - "@types/node" "*" - jest-mock "^29.7.0" - jest-util "^29.7.0" - -jest-get-type@^29.6.3: - version "29.6.3" - resolved "https://registry.yarnpkg.com/jest-get-type/-/jest-get-type-29.6.3.tgz#36f499fdcea197c1045a127319c0481723908fd1" - integrity sha512-zrteXnqYxfQh7l5FHyL38jL39di8H8rHoecLH3JNxH3BwOrBsNeabdap5e0I23lD4HHI8W5VFBZqG4Eaq5LNcw== - -jest-haste-map@^29.7.0: - version "29.7.0" - resolved 
"https://registry.yarnpkg.com/jest-haste-map/-/jest-haste-map-29.7.0.tgz#3c2396524482f5a0506376e6c858c3bbcc17b104" - integrity sha512-fP8u2pyfqx0K1rGn1R9pyE0/KTn+G7PxktWidOBTqFPLYX0b9ksaMFkhK5vrS3DVun09pckLdlx90QthlW7AmA== - dependencies: - "@jest/types" "^29.6.3" - "@types/graceful-fs" "^4.1.3" - "@types/node" "*" - anymatch "^3.0.3" - fb-watchman "^2.0.0" - graceful-fs "^4.2.9" - jest-regex-util "^29.6.3" - jest-util "^29.7.0" - jest-worker "^29.7.0" - micromatch "^4.0.4" - walker "^1.0.8" - optionalDependencies: - fsevents "^2.3.2" - -jest-leak-detector@^29.7.0: - version "29.7.0" - resolved "https://registry.yarnpkg.com/jest-leak-detector/-/jest-leak-detector-29.7.0.tgz#5b7ec0dadfdfec0ca383dc9aa016d36b5ea4c728" - integrity sha512-kYA8IJcSYtST2BY9I+SMC32nDpBT3J2NvWJx8+JCuCdl/CR1I4EKUJROiP8XtCcxqgTTBGJNdbB1A8XRKbTetw== - dependencies: - jest-get-type "^29.6.3" - pretty-format "^29.7.0" - -jest-matcher-utils@^29.7.0: - version "29.7.0" - resolved "https://registry.yarnpkg.com/jest-matcher-utils/-/jest-matcher-utils-29.7.0.tgz#ae8fec79ff249fd592ce80e3ee474e83a6c44f12" - integrity sha512-sBkD+Xi9DtcChsI3L3u0+N0opgPYnCRPtGcQYrgXmR+hmt/fYfWAL0xRXYU8eWOdfuLgBe0YCW3AFtnRLagq/g== - dependencies: - chalk "^4.0.0" - jest-diff "^29.7.0" - jest-get-type "^29.6.3" - pretty-format "^29.7.0" - -jest-message-util@^29.7.0: - version "29.7.0" - resolved "https://registry.yarnpkg.com/jest-message-util/-/jest-message-util-29.7.0.tgz#8bc392e204e95dfe7564abbe72a404e28e51f7f3" - integrity sha512-GBEV4GRADeP+qtB2+6u61stea8mGcOT4mCtrYISZwfu9/ISHFJ/5zOMXYbpBE9RsS5+Gb63DW4FgmnKJ79Kf6w== - dependencies: - "@babel/code-frame" "^7.12.13" - "@jest/types" "^29.6.3" - "@types/stack-utils" "^2.0.0" - chalk "^4.0.0" - graceful-fs "^4.2.9" - micromatch "^4.0.4" - pretty-format "^29.7.0" - slash "^3.0.0" - stack-utils "^2.0.3" - -jest-mock@^29.7.0: - version "29.7.0" - resolved "https://registry.yarnpkg.com/jest-mock/-/jest-mock-29.7.0.tgz#4e836cf60e99c6fcfabe9f99d017f3fdd50a6347" - integrity sha512-ITOMZn+UkYS4ZFh83xYAOzWStloNzJFO2s8DWrE4lhtGD+AorgnbkiKERe4wQVBydIGPx059g6riW5Btp6Llnw== - dependencies: - "@jest/types" "^29.6.3" - "@types/node" "*" - jest-util "^29.7.0" - -jest-pnp-resolver@^1.2.2: - version "1.2.3" - resolved "https://registry.yarnpkg.com/jest-pnp-resolver/-/jest-pnp-resolver-1.2.3.tgz#930b1546164d4ad5937d5540e711d4d38d4cad2e" - integrity sha512-+3NpwQEnRoIBtx4fyhblQDPgJI0H1IEIkX7ShLUjPGA7TtUTvI1oiKi3SR4oBR0hQhQR80l4WAe5RrXBwWMA8w== - -jest-regex-util@^29.6.3: - version "29.6.3" - resolved "https://registry.yarnpkg.com/jest-regex-util/-/jest-regex-util-29.6.3.tgz#4a556d9c776af68e1c5f48194f4d0327d24e8a52" - integrity sha512-KJJBsRCyyLNWCNBOvZyRDnAIfUiRJ8v+hOBQYGn8gDyF3UegwiP4gwRR3/SDa42g1YbVycTidUF3rKjyLFDWbg== - -jest-resolve-dependencies@^29.7.0: - version "29.7.0" - resolved "https://registry.yarnpkg.com/jest-resolve-dependencies/-/jest-resolve-dependencies-29.7.0.tgz#1b04f2c095f37fc776ff40803dc92921b1e88428" - integrity sha512-un0zD/6qxJ+S0et7WxeI3H5XSe9lTBBR7bOHCHXkKR6luG5mwDDlIzVQ0V5cZCuoTgEdcdwzTghYkTWfubi+nA== - dependencies: - jest-regex-util "^29.6.3" - jest-snapshot "^29.7.0" - -jest-resolve@^29.7.0: - version "29.7.0" - resolved "https://registry.yarnpkg.com/jest-resolve/-/jest-resolve-29.7.0.tgz#64d6a8992dd26f635ab0c01e5eef4399c6bcbc30" - integrity sha512-IOVhZSrg+UvVAshDSDtHyFCCBUl/Q3AAJv8iZ6ZjnZ74xzvwuzLXid9IIIPgTnY62SJjfuupMKZsZQRsCvxEgA== - dependencies: - chalk "^4.0.0" - graceful-fs "^4.2.9" - jest-haste-map "^29.7.0" - jest-pnp-resolver "^1.2.2" - jest-util "^29.7.0" - jest-validate 
"^29.7.0" - resolve "^1.20.0" - resolve.exports "^2.0.0" - slash "^3.0.0" - -jest-runner@^29.7.0: - version "29.7.0" - resolved "https://registry.yarnpkg.com/jest-runner/-/jest-runner-29.7.0.tgz#809af072d408a53dcfd2e849a4c976d3132f718e" - integrity sha512-fsc4N6cPCAahybGBfTRcq5wFR6fpLznMg47sY5aDpsoejOcVYFb07AHuSnR0liMcPTgBsA3ZJL6kFOjPdoNipQ== - dependencies: - "@jest/console" "^29.7.0" - "@jest/environment" "^29.7.0" - "@jest/test-result" "^29.7.0" - "@jest/transform" "^29.7.0" - "@jest/types" "^29.6.3" - "@types/node" "*" - chalk "^4.0.0" - emittery "^0.13.1" - graceful-fs "^4.2.9" - jest-docblock "^29.7.0" - jest-environment-node "^29.7.0" - jest-haste-map "^29.7.0" - jest-leak-detector "^29.7.0" - jest-message-util "^29.7.0" - jest-resolve "^29.7.0" - jest-runtime "^29.7.0" - jest-util "^29.7.0" - jest-watcher "^29.7.0" - jest-worker "^29.7.0" - p-limit "^3.1.0" - source-map-support "0.5.13" - -jest-runtime@^29.7.0: - version "29.7.0" - resolved "https://registry.yarnpkg.com/jest-runtime/-/jest-runtime-29.7.0.tgz#efecb3141cf7d3767a3a0cc8f7c9990587d3d817" - integrity sha512-gUnLjgwdGqW7B4LvOIkbKs9WGbn+QLqRQQ9juC6HndeDiezIwhDP+mhMwHWCEcfQ5RUXa6OPnFF8BJh5xegwwQ== - dependencies: - "@jest/environment" "^29.7.0" - "@jest/fake-timers" "^29.7.0" - "@jest/globals" "^29.7.0" - "@jest/source-map" "^29.6.3" - "@jest/test-result" "^29.7.0" - "@jest/transform" "^29.7.0" - "@jest/types" "^29.6.3" - "@types/node" "*" - chalk "^4.0.0" - cjs-module-lexer "^1.0.0" - collect-v8-coverage "^1.0.0" - glob "^7.1.3" - graceful-fs "^4.2.9" - jest-haste-map "^29.7.0" - jest-message-util "^29.7.0" - jest-mock "^29.7.0" - jest-regex-util "^29.6.3" - jest-resolve "^29.7.0" - jest-snapshot "^29.7.0" - jest-util "^29.7.0" - slash "^3.0.0" - strip-bom "^4.0.0" - -jest-snapshot@^29.7.0: - version "29.7.0" - resolved "https://registry.yarnpkg.com/jest-snapshot/-/jest-snapshot-29.7.0.tgz#c2c574c3f51865da1bb329036778a69bf88a6be5" - integrity sha512-Rm0BMWtxBcioHr1/OX5YCP8Uov4riHvKPknOGs804Zg9JGZgmIBkbtlxJC/7Z4msKYVbIJtfU+tKb8xlYNfdkw== - dependencies: - "@babel/core" "^7.11.6" - "@babel/generator" "^7.7.2" - "@babel/plugin-syntax-jsx" "^7.7.2" - "@babel/plugin-syntax-typescript" "^7.7.2" - "@babel/types" "^7.3.3" - "@jest/expect-utils" "^29.7.0" - "@jest/transform" "^29.7.0" - "@jest/types" "^29.6.3" - babel-preset-current-node-syntax "^1.0.0" - chalk "^4.0.0" - expect "^29.7.0" - graceful-fs "^4.2.9" - jest-diff "^29.7.0" - jest-get-type "^29.6.3" - jest-matcher-utils "^29.7.0" - jest-message-util "^29.7.0" - jest-util "^29.7.0" - natural-compare "^1.4.0" - pretty-format "^29.7.0" - semver "^7.5.3" - -jest-util@^29.0.0, jest-util@^29.7.0: - version "29.7.0" - resolved "https://registry.yarnpkg.com/jest-util/-/jest-util-29.7.0.tgz#23c2b62bfb22be82b44de98055802ff3710fc0bc" - integrity sha512-z6EbKajIpqGKU56y5KBUgy1dt1ihhQJgWzUlZHArA/+X2ad7Cb5iF+AK1EWVL/Bo7Rz9uurpqw6SiBCefUbCGA== - dependencies: - "@jest/types" "^29.6.3" - "@types/node" "*" - chalk "^4.0.0" - ci-info "^3.2.0" - graceful-fs "^4.2.9" - picomatch "^2.2.3" - -jest-validate@^29.7.0: - version "29.7.0" - resolved "https://registry.yarnpkg.com/jest-validate/-/jest-validate-29.7.0.tgz#7bf705511c64da591d46b15fce41400d52147d9c" - integrity sha512-ZB7wHqaRGVw/9hST/OuFUReG7M8vKeq0/J2egIGLdvjHCmYqGARhzXmtgi+gVeZ5uXFF219aOc3Ls2yLg27tkw== - dependencies: - "@jest/types" "^29.6.3" - camelcase "^6.2.0" - chalk "^4.0.0" - jest-get-type "^29.6.3" - leven "^3.1.0" - pretty-format "^29.7.0" - -jest-watcher@^29.7.0: - version "29.7.0" - resolved 
"https://registry.yarnpkg.com/jest-watcher/-/jest-watcher-29.7.0.tgz#7810d30d619c3a62093223ce6bb359ca1b28a2f2" - integrity sha512-49Fg7WXkU3Vl2h6LbLtMQ/HyB6rXSIX7SqvBLQmssRBGN9I0PNvPmAmCWSOY6SOvrjhI/F7/bGAv9RtnsPA03g== - dependencies: - "@jest/test-result" "^29.7.0" - "@jest/types" "^29.6.3" - "@types/node" "*" - ansi-escapes "^4.2.1" - chalk "^4.0.0" - emittery "^0.13.1" - jest-util "^29.7.0" - string-length "^4.0.1" - -jest-worker@^29.7.0: - version "29.7.0" - resolved "https://registry.yarnpkg.com/jest-worker/-/jest-worker-29.7.0.tgz#acad073acbbaeb7262bd5389e1bcf43e10058d4a" - integrity sha512-eIz2msL/EzL9UFTFFx7jBTkeZfku0yUAyZZZmJ93H2TYEiroIx2PQjEXcwYtYl8zXCxb+PAmA2hLIt/6ZEkPHw== - dependencies: - "@types/node" "*" - jest-util "^29.7.0" - merge-stream "^2.0.0" - supports-color "^8.0.0" - -jest@^29.4.0: - version "29.7.0" - resolved "https://registry.yarnpkg.com/jest/-/jest-29.7.0.tgz#994676fc24177f088f1c5e3737f5697204ff2613" - integrity sha512-NIy3oAFp9shda19hy4HK0HRTWKtPJmGdnvywu01nOqNC2vZg+Z+fvJDxpMQA88eb2I9EcafcdjYgsDthnYTvGw== - dependencies: - "@jest/core" "^29.7.0" - "@jest/types" "^29.6.3" - import-local "^3.0.2" - jest-cli "^29.7.0" - -js-tokens@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/js-tokens/-/js-tokens-4.0.0.tgz#19203fb59991df98e3a287050d4647cdeaf32499" - integrity sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ== - -js-yaml@^3.13.1: - version "3.14.1" - resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-3.14.1.tgz#dae812fdb3825fa306609a8717383c50c36a0537" - integrity sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g== - dependencies: - argparse "^1.0.7" - esprima "^4.0.0" - -js-yaml@^4.1.0: - version "4.1.0" - resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-4.1.0.tgz#c1fb65f8f5017901cdd2c951864ba18458a10602" - integrity sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA== - dependencies: - argparse "^2.0.1" - -jsesc@^2.5.1: - version "2.5.2" - resolved "https://registry.yarnpkg.com/jsesc/-/jsesc-2.5.2.tgz#80564d2e483dacf6e8ef209650a67df3f0c283a4" - integrity sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA== - -json-buffer@3.0.1: - version "3.0.1" - resolved "https://registry.yarnpkg.com/json-buffer/-/json-buffer-3.0.1.tgz#9338802a30d3b6605fbe0613e094008ca8c05a13" - integrity sha512-4bV5BfR2mqfQTJm+V5tPPdf+ZpuhiIvTuAB5g8kcrXOZpTT/QwwVRWBywX1ozr6lEuPdbHxwaJlm9G6mI2sfSQ== - -json-parse-even-better-errors@^2.3.0: - version "2.3.1" - resolved "https://registry.yarnpkg.com/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz#7c47805a94319928e05777405dc12e1f7a4ee02d" - integrity sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w== - -json-schema-traverse@^0.4.1: - version "0.4.1" - resolved "https://registry.yarnpkg.com/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz#69f6a87d9513ab8bb8fe63bdb0979c448e684660" - integrity sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg== - -json-stable-stringify-without-jsonify@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz#9db7b59496ad3f3cfef30a75142d2d930ad72651" - integrity sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw== - -json5@^2.2.2, json5@^2.2.3: - version 
"2.2.3" - resolved "https://registry.yarnpkg.com/json5/-/json5-2.2.3.tgz#78cd6f1a19bdc12b73db5ad0c61efd66c1e29283" - integrity sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg== - -jsonc-parser@^3.2.0: - version "3.2.1" - resolved "https://registry.yarnpkg.com/jsonc-parser/-/jsonc-parser-3.2.1.tgz#031904571ccf929d7670ee8c547545081cb37f1a" - integrity sha512-AilxAyFOAcK5wA1+LeaySVBrHsGQvUFCDWXKpZjzaL0PqW+xfBOttn8GNtWKFWqneyMZj41MWF9Kl6iPWLwgOA== - -keyv@^4.5.4: - version "4.5.4" - resolved "https://registry.yarnpkg.com/keyv/-/keyv-4.5.4.tgz#a879a99e29452f942439f2a405e3af8b31d4de93" - integrity sha512-oxVHkHR/EJf2CNXnWxRLW6mg7JyCCUcG0DtEGmL2ctUo1PNTin1PUil+r/+4r5MpVgC/fn1kjsx7mjSujKqIpw== - dependencies: - json-buffer "3.0.1" - -kleur@^3.0.3: - version "3.0.3" - resolved "https://registry.yarnpkg.com/kleur/-/kleur-3.0.3.tgz#a79c9ecc86ee1ce3fa6206d1216c501f147fc07e" - integrity sha512-eTIzlVOSUR+JxdDFepEYcBMtZ9Qqdef+rnzWdRZuMbOywu5tO2w2N7rqjoANZ5k9vywhL6Br1VRjUIgTQx4E8w== - -leven@^3.1.0: - version "3.1.0" - resolved "https://registry.yarnpkg.com/leven/-/leven-3.1.0.tgz#77891de834064cccba82ae7842bb6b14a13ed7f2" - integrity sha512-qsda+H8jTaUaN/x5vzW2rzc+8Rw4TAQ/4KjB46IwK5VH+IlVeeeje/EoZRpiXvIqjFgK84QffqPztGI3VBLG1A== - -levn@^0.4.1: - version "0.4.1" - resolved "https://registry.yarnpkg.com/levn/-/levn-0.4.1.tgz#ae4562c007473b932a6200d403268dd2fffc6ade" - integrity sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ== - dependencies: - prelude-ls "^1.2.1" - type-check "~0.4.0" - -lines-and-columns@^1.1.6: - version "1.2.4" - resolved "https://registry.yarnpkg.com/lines-and-columns/-/lines-and-columns-1.2.4.tgz#eca284f75d2965079309dc0ad9255abb2ebc1632" - integrity sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg== - -locate-path@^5.0.0: - version "5.0.0" - resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-5.0.0.tgz#1afba396afd676a6d42504d0a67a3a7eb9f62aa0" - integrity sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g== - dependencies: - p-locate "^4.1.0" - -locate-path@^6.0.0: - version "6.0.0" - resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-6.0.0.tgz#55321eb309febbc59c4801d931a72452a681d286" - integrity sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw== - dependencies: - p-locate "^5.0.0" - -lodash.memoize@4.x: - version "4.1.2" - resolved "https://registry.yarnpkg.com/lodash.memoize/-/lodash.memoize-4.1.2.tgz#bcc6c49a42a2840ed997f323eada5ecd182e0bfe" - integrity sha512-t7j+NzmgnQzTAYXcsHYLgimltOV1MXHtlOWf6GjL9Kj8GK5FInw5JotxvbOs+IvV1/Dzo04/fCGfLVs7aXb4Ag== - -lodash.merge@^4.6.2: - version "4.6.2" - resolved "https://registry.yarnpkg.com/lodash.merge/-/lodash.merge-4.6.2.tgz#558aa53b43b661e1925a0afdfa36a9a1085fe57a" - integrity sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ== - -lru-cache@^10.4.3: - version "10.4.3" - resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-10.4.3.tgz#410fc8a17b70e598013df257c2446b7f3383f119" - integrity sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ== - -lru-cache@^5.1.1: - version "5.1.1" - resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-5.1.1.tgz#1da27e6710271947695daf6848e847f01d84b920" - integrity sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w== - 
dependencies: - yallist "^3.0.2" - -lru-cache@^6.0.0: - version "6.0.0" - resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-6.0.0.tgz#6d6fe6570ebd96aaf90fcad1dafa3b2566db3a94" - integrity sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA== - dependencies: - yallist "^4.0.0" - -make-dir@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/make-dir/-/make-dir-4.0.0.tgz#c3c2307a771277cd9638305f915c29ae741b614e" - integrity sha512-hXdUTZYIVOt1Ex//jAQi+wTZZpUpwBj/0QsOzqegb3rGMMeJiSEu5xLHnYfBrRV4RH2+OCSOO95Is/7x1WJ4bw== - dependencies: - semver "^7.5.3" - -make-error@1.x, make-error@^1.1.1: - version "1.3.6" - resolved "https://registry.yarnpkg.com/make-error/-/make-error-1.3.6.tgz#2eb2e37ea9b67c4891f684a1394799af484cf7a2" - integrity sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw== - -makeerror@1.0.12: - version "1.0.12" - resolved "https://registry.yarnpkg.com/makeerror/-/makeerror-1.0.12.tgz#3e5dd2079a82e812e983cc6610c4a2cb0eaa801a" - integrity sha512-JmqCvUhmt43madlpFzG4BQzG2Z3m6tvQDNKdClZnO3VbIudJYmxsT0FNJMeiB2+JTSlTQTSbU8QdesVmwJcmLg== - dependencies: - tmpl "1.0.5" - -marked-terminal@^7.1.0: - version "7.2.1" - resolved "https://registry.yarnpkg.com/marked-terminal/-/marked-terminal-7.2.1.tgz#9c1ae073a245a03c6a13e3eeac6f586f29856068" - integrity sha512-rQ1MoMFXZICWNsKMiiHwP/Z+92PLKskTPXj+e7uwXmuMPkNn7iTqC+IvDekVm1MPeC9wYQeLxeFaOvudRR/XbQ== - dependencies: - ansi-escapes "^7.0.0" - ansi-regex "^6.1.0" - chalk "^5.3.0" - cli-highlight "^2.1.11" - cli-table3 "^0.6.5" - node-emoji "^2.1.3" - supports-hyperlinks "^3.1.0" - -marked@^9.1.2: - version "9.1.6" - resolved "https://registry.yarnpkg.com/marked/-/marked-9.1.6.tgz#5d2a3f8180abfbc5d62e3258a38a1c19c0381695" - integrity sha512-jcByLnIFkd5gSXZmjNvS1TlmRhCXZjIzHYlaGkPlLIekG55JDR2Z4va9tZwCiP+/RDERiNhMOFu01xd6O5ct1Q== - -merge-stream@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/merge-stream/-/merge-stream-2.0.0.tgz#52823629a14dd00c9770fb6ad47dc6310f2c1f60" - integrity sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w== - -merge2@^1.3.0: - version "1.4.1" - resolved "https://registry.yarnpkg.com/merge2/-/merge2-1.4.1.tgz#4368892f885e907455a6fd7dc55c0c9d404990ae" - integrity sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg== - -micromatch@^4.0.4: - version "4.0.8" - resolved "https://registry.yarnpkg.com/micromatch/-/micromatch-4.0.8.tgz#d66fa18f3a47076789320b9b1af32bd86d9fa202" - integrity sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA== - dependencies: - braces "^3.0.3" - picomatch "^2.3.1" - -mimic-fn@^2.1.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/mimic-fn/-/mimic-fn-2.1.0.tgz#7ed2c2ccccaf84d3ffcb7a69b57711fc2083401b" - integrity sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg== - -minimatch@^3.0.4, minimatch@^3.1.1, minimatch@^3.1.2: - version "3.1.2" - resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.1.2.tgz#19cd194bfd3e428f049a70817c038d89ab4be35b" - integrity sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw== - dependencies: - brace-expansion "^1.1.7" - -minimatch@^5.0.1: - version "5.1.6" - resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-5.1.6.tgz#1cfcb8cf5522ea69952cd2af95ae09477f122a96" - integrity 
sha512-lKwV/1brpG6mBUFHtb7NUmtABCb2WZZmm2wNiOA5hAb8VdCS4B3dtMWyvcoViccwAW/COERjXLt0zP1zXUN26g== - dependencies: - brace-expansion "^2.0.1" - -minimatch@^9.0.4: - version "9.0.5" - resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-9.0.5.tgz#d74f9dd6b57d83d8e98cfb82133b03978bc929e5" - integrity sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow== - dependencies: - brace-expansion "^2.0.1" - -minimist@^1.2.6: - version "1.2.6" - resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.6.tgz#8637a5b759ea0d6e98702cfb3a9283323c93af44" - integrity sha512-Jsjnk4bw3YJqYzbdyBiNsPWHPfO++UGG749Cxs6peCu5Xg4nrena6OVxOYxrQTqww0Jmwt+Ref8rggumkTLz9Q== - -mri@^1.1.0: - version "1.2.0" - resolved "https://registry.yarnpkg.com/mri/-/mri-1.2.0.tgz#6721480fec2a11a4889861115a48b6cbe7cc8f0b" - integrity sha512-tzzskb3bG8LvYGFF/mDTpq3jpI6Q9wc3LEmBaghu+DdCssd1FakN7Bc0hVNmEyGq1bq3RgfkCb3cmQLpNPOroA== - -ms@2.1.2: - version "2.1.2" - resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.2.tgz#d09d1f357b443f493382a8eb3ccd183872ae6009" - integrity sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w== - -ms@^2.1.3: - version "2.1.3" - resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.3.tgz#574c8138ce1d2b5861f0b44579dbadd60c6615b2" - integrity sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA== - -mz@^2.4.0: - version "2.7.0" - resolved "https://registry.yarnpkg.com/mz/-/mz-2.7.0.tgz#95008057a56cafadc2bc63dde7f9ff6955948e32" - integrity sha512-z81GNO7nnYMEhrGh9LeymoE4+Yr0Wn5McHIZMK5cfQCl+NDX08sCZgUc9/6MHni9IWuFLm1Z3HTCXu2z9fN62Q== - dependencies: - any-promise "^1.0.0" - object-assign "^4.0.1" - thenify-all "^1.0.0" - -natural-compare@^1.4.0: - version "1.4.0" - resolved "https://registry.yarnpkg.com/natural-compare/-/natural-compare-1.4.0.tgz#4abebfeed7541f2c27acfb29bdbbd15c8d5ba4f7" - integrity sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw== - -node-emoji@^2.1.3: - version "2.1.3" - resolved "https://registry.yarnpkg.com/node-emoji/-/node-emoji-2.1.3.tgz#93cfabb5cc7c3653aa52f29d6ffb7927d8047c06" - integrity sha512-E2WEOVsgs7O16zsURJ/eH8BqhF029wGpEOnv7Urwdo2wmQanOACwJQh0devF9D9RhoZru0+9JXIS0dBXIAz+lA== - dependencies: - "@sindresorhus/is" "^4.6.0" - char-regex "^1.0.2" - emojilib "^2.4.0" - skin-tone "^2.0.0" - -node-int64@^0.4.0: - version "0.4.0" - resolved "https://registry.yarnpkg.com/node-int64/-/node-int64-0.4.0.tgz#87a9065cdb355d3182d8f94ce11188b825c68a3b" - integrity sha512-O5lz91xSOeoXP6DulyHfllpq+Eg00MWitZIbtPfoSEvqIHdl5gfcY6hYzDWnj0qD5tz52PI08u9qUvSVeUBeHw== - -node-releases@^2.0.14: - version "2.0.14" - resolved "https://registry.yarnpkg.com/node-releases/-/node-releases-2.0.14.tgz#2ffb053bceb8b2be8495ece1ab6ce600c4461b0b" - integrity sha512-y10wOWt8yZpqXmOgRo77WaHEmhYQYGNA6y421PKsKYWEK8aW+cqAphborZDhqfyKrbZEN92CN1X2KbafY2s7Yw== - -normalize-path@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/normalize-path/-/normalize-path-3.0.0.tgz#0dcd69ff23a1c9b11fd0978316644a0388216a65" - integrity sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA== - -npm-bundled@^2.0.0: - version "2.0.1" - resolved "https://registry.yarnpkg.com/npm-bundled/-/npm-bundled-2.0.1.tgz#94113f7eb342cd7a67de1e789f896b04d2c600f4" - integrity sha512-gZLxXdjEzE/+mOstGDqR6b0EkhJ+kM6fxM6vUuckuctuVPh80Q6pw/rSZj9s4Gex9GxWtIicO1pc8DB9KZWudw== - dependencies: - npm-normalize-package-bin "^2.0.0" - 
-npm-normalize-package-bin@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/npm-normalize-package-bin/-/npm-normalize-package-bin-2.0.0.tgz#9447a1adaaf89d8ad0abe24c6c84ad614a675fff" - integrity sha512-awzfKUO7v0FscrSpRoogyNm0sajikhBWpU0QMrW09AMi9n1PoKU6WaIqUzuJSQnpciZZmJ/jMZ2Egfmb/9LiWQ== - -npm-packlist@^5.1.3: - version "5.1.3" - resolved "https://registry.yarnpkg.com/npm-packlist/-/npm-packlist-5.1.3.tgz#69d253e6fd664b9058b85005905012e00e69274b" - integrity sha512-263/0NGrn32YFYi4J533qzrQ/krmmrWwhKkzwTuM4f/07ug51odoaNjUexxO4vxlzURHcmYMH1QjvHjsNDKLVg== - dependencies: - glob "^8.0.1" - ignore-walk "^5.0.1" - npm-bundled "^2.0.0" - npm-normalize-package-bin "^2.0.0" - -npm-run-path@^4.0.1: - version "4.0.1" - resolved "https://registry.yarnpkg.com/npm-run-path/-/npm-run-path-4.0.1.tgz#b7ecd1e5ed53da8e37a55e1c2269e0b97ed748ea" - integrity sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw== - dependencies: - path-key "^3.0.0" - -object-assign@^4.0.1: - version "4.1.1" - resolved "https://registry.yarnpkg.com/object-assign/-/object-assign-4.1.1.tgz#2109adc7965887cfc05cbbd442cac8bfbb360863" - integrity sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg== - -once@^1.3.0: - version "1.4.0" - resolved "https://registry.yarnpkg.com/once/-/once-1.4.0.tgz#583b1aa775961d4b113ac17d9c50baef9dd76bd1" - integrity sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w== - dependencies: - wrappy "1" - -onetime@^5.1.2: - version "5.1.2" - resolved "https://registry.yarnpkg.com/onetime/-/onetime-5.1.2.tgz#d0e96ebb56b07476df1dd9c4806e5237985ca45e" - integrity sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg== - dependencies: - mimic-fn "^2.1.0" - -optionator@^0.9.3: - version "0.9.3" - resolved "https://registry.yarnpkg.com/optionator/-/optionator-0.9.3.tgz#007397d44ed1872fdc6ed31360190f81814e2c64" - integrity sha512-JjCoypp+jKn1ttEFExxhetCKeJt9zhAgAve5FXHixTvFDW/5aEktX9bufBKLRRMdU7bNtpLfcGu94B3cdEJgjg== - dependencies: - "@aashutoshrathi/word-wrap" "^1.2.3" - deep-is "^0.1.3" - fast-levenshtein "^2.0.6" - levn "^0.4.1" - prelude-ls "^1.2.1" - type-check "^0.4.0" - -p-all@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/p-all/-/p-all-3.0.0.tgz#077c023c37e75e760193badab2bad3ccd5782bfb" - integrity sha512-qUZbvbBFVXm6uJ7U/WDiO0fv6waBMbjlCm4E66oZdRR+egswICarIdHyVSZZHudH8T5SF8x/JG0q0duFzPnlBw== - dependencies: - p-map "^4.0.0" - -p-limit@^2.2.0: - version "2.3.0" - resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-2.3.0.tgz#3dd33c647a214fdfffd835933eb086da0dc21db1" - integrity sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w== - dependencies: - p-try "^2.0.0" - -p-limit@^3.0.2, p-limit@^3.1.0: - version "3.1.0" - resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-3.1.0.tgz#e1daccbe78d0d1388ca18c64fea38e3e57e3706b" - integrity sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ== - dependencies: - yocto-queue "^0.1.0" - -p-locate@^4.1.0: - version "4.1.0" - resolved "https://registry.yarnpkg.com/p-locate/-/p-locate-4.1.0.tgz#a3428bb7088b3a60292f66919278b7c297ad4f07" - integrity sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A== - dependencies: - p-limit "^2.2.0" - -p-locate@^5.0.0: - version "5.0.0" - resolved 
"https://registry.yarnpkg.com/p-locate/-/p-locate-5.0.0.tgz#83c8315c6785005e3bd021839411c9e110e6d834" - integrity sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw== - dependencies: - p-limit "^3.0.2" - -p-map@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/p-map/-/p-map-4.0.0.tgz#bb2f95a5eda2ec168ec9274e06a747c3e2904d2b" - integrity sha512-/bjOqmgETBYB5BoEeGVea8dmvHb2m9GLy1E9W43yeyfP6QQCZGFNa+XRceJEuDB6zqr+gKpIAmlLebMpykw/MQ== - dependencies: - aggregate-error "^3.0.0" - -p-try@^2.0.0: - version "2.2.0" - resolved "https://registry.yarnpkg.com/p-try/-/p-try-2.2.0.tgz#cb2868540e313d61de58fafbe35ce9004d5540e6" - integrity sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ== - -parent-module@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/parent-module/-/parent-module-1.0.1.tgz#691d2709e78c79fae3a156622452d00762caaaa2" - integrity sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g== - dependencies: - callsites "^3.0.0" - -parse-json@^5.2.0: - version "5.2.0" - resolved "https://registry.yarnpkg.com/parse-json/-/parse-json-5.2.0.tgz#c76fc66dee54231c962b22bcc8a72cf2f99753cd" - integrity sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg== - dependencies: - "@babel/code-frame" "^7.0.0" - error-ex "^1.3.1" - json-parse-even-better-errors "^2.3.0" - lines-and-columns "^1.1.6" - -parse5-htmlparser2-tree-adapter@^6.0.0: - version "6.0.1" - resolved "https://registry.yarnpkg.com/parse5-htmlparser2-tree-adapter/-/parse5-htmlparser2-tree-adapter-6.0.1.tgz#2cdf9ad823321140370d4dbf5d3e92c7c8ddc6e6" - integrity sha512-qPuWvbLgvDGilKc5BoicRovlT4MtYT6JfJyBOMDsKoiT+GiuP5qyrPCnR9HcPECIJJmZh5jRndyNThnhhb/vlA== - dependencies: - parse5 "^6.0.1" - -parse5@^5.1.1: - version "5.1.1" - resolved "https://registry.yarnpkg.com/parse5/-/parse5-5.1.1.tgz#f68e4e5ba1852ac2cadc00f4555fff6c2abb6178" - integrity sha512-ugq4DFI0Ptb+WWjAdOK16+u/nHfiIrcE+sh8kZMaM0WllQKLI9rOUq6c2b7cwPkXdzfQESqvoqK6ug7U/Yyzug== - -parse5@^6.0.1: - version "6.0.1" - resolved "https://registry.yarnpkg.com/parse5/-/parse5-6.0.1.tgz#e1a1c085c569b3dc08321184f19a39cc27f7c30b" - integrity sha512-Ofn/CTFzRGTTxwpNEs9PP93gXShHcTq255nzRYSKe8AkVpZY7e1fpmTfOyoIvjP5HG7Z2ZM7VS9PPhQGW2pOpw== - -path-exists@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/path-exists/-/path-exists-4.0.0.tgz#513bdbe2d3b95d7762e8c1137efa195c6c61b5b3" - integrity sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w== - -path-is-absolute@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/path-is-absolute/-/path-is-absolute-1.0.1.tgz#174b9268735534ffbc7ace6bf53a5a9e1b5c5f5f" - integrity sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg== - -path-key@^3.0.0, path-key@^3.1.0: - version "3.1.1" - resolved "https://registry.yarnpkg.com/path-key/-/path-key-3.1.1.tgz#581f6ade658cbba65a0d3380de7753295054f375" - integrity sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q== - -path-parse@^1.0.7: - version "1.0.7" - resolved "https://registry.yarnpkg.com/path-parse/-/path-parse-1.0.7.tgz#fbc114b60ca42b30d9daf5858e4bd68bbedb6735" - integrity sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw== - -picocolors@^1.0.0: - version "1.0.0" - resolved 
"https://registry.yarnpkg.com/picocolors/-/picocolors-1.0.0.tgz#cb5bdc74ff3f51892236eaf79d68bc44564ab81c" - integrity sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ== - -picocolors@^1.1.1: - version "1.1.1" - resolved "https://registry.yarnpkg.com/picocolors/-/picocolors-1.1.1.tgz#3d321af3eab939b083c8f929a1d12cda81c26b6b" - integrity sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA== - -picomatch@^2.0.4, picomatch@^2.2.3, picomatch@^2.3.1: - version "2.3.1" - resolved "https://registry.yarnpkg.com/picomatch/-/picomatch-2.3.1.tgz#3ba3833733646d9d3e4995946c1365a67fb07a42" - integrity sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA== - -pirates@^4.0.4: - version "4.0.6" - resolved "https://registry.yarnpkg.com/pirates/-/pirates-4.0.6.tgz#3018ae32ecfcff6c29ba2267cbf21166ac1f36b9" - integrity sha512-saLsH7WeYYPiD25LDuLRRY/i+6HaPYr6G1OUlN39otzkSTxKnubR9RTxS3/Kk50s1g2JTgFwWQDQyplC5/SHZg== - -pkg-dir@^4.2.0: - version "4.2.0" - resolved "https://registry.yarnpkg.com/pkg-dir/-/pkg-dir-4.2.0.tgz#f099133df7ede422e81d1d8448270eeb3e4261f3" - integrity sha512-HRDzbaKjC+AOWVXxAU/x54COGeIv9eb+6CkDSQoNTt4XyWoIJvuPsXizxu/Fr23EiekbtZwmh1IcIG/l/a10GQ== - dependencies: - find-up "^4.0.0" - -prelude-ls@^1.2.1: - version "1.2.1" - resolved "https://registry.yarnpkg.com/prelude-ls/-/prelude-ls-1.2.1.tgz#debc6489d7a6e6b0e7611888cec880337d316396" - integrity sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g== - -prettier-linter-helpers@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/prettier-linter-helpers/-/prettier-linter-helpers-1.0.0.tgz#d23d41fe1375646de2d0104d3454a3008802cf7b" - integrity sha512-GbK2cP9nraSSUF9N2XwUwqfzlAFlMNYYl+ShE/V+H8a9uNl/oUqB1w2EL54Jh0OlyRSd8RfWYJ3coVS4TROP2w== - dependencies: - fast-diff "^1.1.2" - -prettier@^3.0.0: - version "3.1.1" - resolved "https://registry.yarnpkg.com/prettier/-/prettier-3.1.1.tgz#6ba9f23165d690b6cbdaa88cb0807278f7019848" - integrity sha512-22UbSzg8luF4UuZtzgiUOfcGM8s4tjBv6dJRT7j275NXsy2jb4aJa4NNveul5x4eqlF1wuhuR2RElK71RvmVaw== - -pretty-format@^29.0.0, pretty-format@^29.7.0: - version "29.7.0" - resolved "https://registry.yarnpkg.com/pretty-format/-/pretty-format-29.7.0.tgz#ca42c758310f365bfa71a0bda0a807160b776812" - integrity sha512-Pdlw/oPxN+aXdmM9R00JVC9WVFoCLTKJvDVLgmJ+qAffBMxsV85l/Lu7sNx4zSzPyoL2euImuEwHhOXdEgNFZQ== - dependencies: - "@jest/schemas" "^29.6.3" - ansi-styles "^5.0.0" - react-is "^18.0.0" - -prompts@^2.0.1: - version "2.4.2" - resolved "https://registry.yarnpkg.com/prompts/-/prompts-2.4.2.tgz#7b57e73b3a48029ad10ebd44f74b01722a4cb069" - integrity sha512-NxNv/kLguCA7p3jE8oL2aEBsrJWgAakBpgmgK6lpPWV+WuOmY6r2/zbAVnP+T8bQlA0nzHXSJSJW0Hq7ylaD2Q== - dependencies: - kleur "^3.0.3" - sisteransi "^1.0.5" - -publint@^0.2.12: - version "0.2.12" - resolved "https://registry.yarnpkg.com/publint/-/publint-0.2.12.tgz#d25cd6bd243d5bdd640344ecdddb3eeafdcc4059" - integrity sha512-YNeUtCVeM4j9nDiTT2OPczmlyzOkIXNtdDZnSuajAxS/nZ6j3t7Vs9SUB4euQNddiltIwu7Tdd3s+hr08fAsMw== - dependencies: - npm-packlist "^5.1.3" - picocolors "^1.1.1" - sade "^1.8.1" - -punycode@^2.1.0: - version "2.3.0" - resolved "https://registry.yarnpkg.com/punycode/-/punycode-2.3.0.tgz#f67fa67c94da8f4d0cfff981aee4118064199b8f" - integrity sha512-rRV+zQD8tVFys26lAGR9WUuS4iUAngJScM+ZRSKtvl5tKeZ2t5bvdNFdNHBW9FWR4guGHlgmsZ1G7BSm2wTbuA== - -pure-rand@^6.0.0: - version "6.0.4" - resolved 
"https://registry.yarnpkg.com/pure-rand/-/pure-rand-6.0.4.tgz#50b737f6a925468679bff00ad20eade53f37d5c7" - integrity sha512-LA0Y9kxMYv47GIPJy6MI84fqTd2HmYZI83W/kM/SkKfDlajnZYfmXFTxkbY+xSBPkLJxltMa9hIkmdc29eguMA== - -queue-microtask@^1.2.2: - version "1.2.3" - resolved "https://registry.yarnpkg.com/queue-microtask/-/queue-microtask-1.2.3.tgz#4929228bbc724dfac43e0efb058caf7b6cfb6243" - integrity sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A== - -react-is@^18.0.0: - version "18.2.0" - resolved "https://registry.yarnpkg.com/react-is/-/react-is-18.2.0.tgz#199431eeaaa2e09f86427efbb4f1473edb47609b" - integrity sha512-xWGDIW6x921xtzPkhiULtthJHoJvBbF3q26fzloPCK0hsvxtPVelvftw3zjbHWSkR2km9Z+4uxbDDK/6Zw9B8w== - -readable-stream@^3.4.0: - version "3.6.2" - resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-3.6.2.tgz#56a9b36ea965c00c5a93ef31eb111a0f11056967" - integrity sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA== - dependencies: - inherits "^2.0.3" - string_decoder "^1.1.1" - util-deprecate "^1.0.1" - -require-directory@^2.1.1: - version "2.1.1" - resolved "https://registry.yarnpkg.com/require-directory/-/require-directory-2.1.1.tgz#8c64ad5fd30dab1c976e2344ffe7f792a6a6df42" - integrity sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q== - -resolve-cwd@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/resolve-cwd/-/resolve-cwd-3.0.0.tgz#0f0075f1bb2544766cf73ba6a6e2adfebcb13f2d" - integrity sha512-OrZaX2Mb+rJCpH/6CpSqt9xFVpN++x01XnN2ie9g6P5/3xelLAkXWVADpdz1IHD/KFfEXyE6V0U01OQ3UO2rEg== - dependencies: - resolve-from "^5.0.0" - -resolve-from@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-4.0.0.tgz#4abcd852ad32dd7baabfe9b40e00a36db5f392e6" - integrity sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g== - -resolve-from@^5.0.0: - version "5.0.0" - resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-5.0.0.tgz#c35225843df8f776df21c57557bc087e9dfdfc69" - integrity sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw== - -resolve.exports@^2.0.0: - version "2.0.2" - resolved "https://registry.yarnpkg.com/resolve.exports/-/resolve.exports-2.0.2.tgz#f8c934b8e6a13f539e38b7098e2e36134f01e800" - integrity sha512-X2UW6Nw3n/aMgDVy+0rSqgHlv39WZAlZrXCdnbyEiKm17DSqHX4MmQMaST3FbeWR5FTuRcUwYAziZajji0Y7mg== - -resolve@^1.20.0: - version "1.22.8" - resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.22.8.tgz#b6c87a9f2aa06dfab52e3d70ac8cde321fa5a48d" - integrity sha512-oKWePCxqpd6FlLvGV1VU0x7bkPmmCNolxzjMf4NczoDnQcIWrAF+cPtZn5i6n+RfD2d9i0tzpKnG6Yk168yIyw== - dependencies: - is-core-module "^2.13.0" - path-parse "^1.0.7" - supports-preserve-symlinks-flag "^1.0.0" - -reusify@^1.0.4: - version "1.0.4" - resolved "https://registry.yarnpkg.com/reusify/-/reusify-1.0.4.tgz#90da382b1e126efc02146e90845a88db12925d76" - integrity sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw== - -run-parallel@^1.1.9: - version "1.2.0" - resolved "https://registry.yarnpkg.com/run-parallel/-/run-parallel-1.2.0.tgz#66d1368da7bdf921eb9d95bd1a9229e7f21a43ee" - integrity sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA== - dependencies: - queue-microtask "^1.2.2" - -sade@^1.8.1: - version "1.8.1" - resolved 
"https://registry.yarnpkg.com/sade/-/sade-1.8.1.tgz#0a78e81d658d394887be57d2a409bf703a3b2701" - integrity sha512-xal3CZX1Xlo/k4ApwCFrHVACi9fBqJ7V+mwhBsuf/1IOKbBy098Fex+Wa/5QMubw09pSZ/u8EY8PWgevJsXp1A== - dependencies: - mri "^1.1.0" - -safe-buffer@~5.2.0: - version "5.2.1" - resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.2.1.tgz#1eaf9fa9bdb1fdd4ec75f58f9cdb4e6b7827eec6" - integrity sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ== - -"safer-buffer@>= 2.1.2 < 3.0.0": - version "2.1.2" - resolved "https://registry.yarnpkg.com/safer-buffer/-/safer-buffer-2.1.2.tgz#44fa161b0187b9549dd84bb91802f9bd8385cd6a" - integrity sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg== - -semver@^6.3.0, semver@^6.3.1: - version "6.3.1" - resolved "https://registry.yarnpkg.com/semver/-/semver-6.3.1.tgz#556d2ef8689146e46dcea4bfdd095f3434dffcb4" - integrity sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA== - -semver@^7.5.3: - version "7.5.4" - resolved "https://registry.yarnpkg.com/semver/-/semver-7.5.4.tgz#483986ec4ed38e1c6c48c34894a9182dbff68a6e" - integrity sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA== - dependencies: - lru-cache "^6.0.0" - -semver@^7.5.4: - version "7.6.3" - resolved "https://registry.yarnpkg.com/semver/-/semver-7.6.3.tgz#980f7b5550bc175fb4dc09403085627f9eb33143" - integrity sha512-oVekP1cKtI+CTDvHWYFUcMtsK/00wmAEfyqKfNdARm8u1wNVhSgaX7A8d4UuIlUI5e84iEwOhs7ZPYRmzU9U6A== - -semver@^7.6.0: - version "7.7.1" - resolved "https://registry.yarnpkg.com/semver/-/semver-7.7.1.tgz#abd5098d82b18c6c81f6074ff2647fd3e7220c9f" - integrity sha512-hlq8tAfn0m/61p4BVRcPzIGr6LKiMwo4VM6dGi6pt4qcRkmNzTcWq6eCEjEh+qXjkMDvPlOFFSGwQjoEa6gyMA== - -shebang-command@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/shebang-command/-/shebang-command-2.0.0.tgz#ccd0af4f8835fbdc265b82461aaf0c36663f34ea" - integrity sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA== - dependencies: - shebang-regex "^3.0.0" - -shebang-regex@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/shebang-regex/-/shebang-regex-3.0.0.tgz#ae16f1644d873ecad843b0307b143362d4c42172" - integrity sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A== - -signal-exit@^3.0.3, signal-exit@^3.0.7: - version "3.0.7" - resolved "https://registry.yarnpkg.com/signal-exit/-/signal-exit-3.0.7.tgz#a9a1767f8af84155114eaabd73f99273c8f59ad9" - integrity sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ== - -sisteransi@^1.0.5: - version "1.0.5" - resolved "https://registry.yarnpkg.com/sisteransi/-/sisteransi-1.0.5.tgz#134d681297756437cc05ca01370d3a7a571075ed" - integrity sha512-bLGGlR1QxBcynn2d5YmDX4MGjlZvy2MRBDRNHLJ8VI6l6+9FUiyTFNJ0IveOSP0bcXgVDPRcfGqA0pjaqUpfVg== - -skin-tone@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/skin-tone/-/skin-tone-2.0.0.tgz#4e3933ab45c0d4f4f781745d64b9f4c208e41237" - integrity sha512-kUMbT1oBJCpgrnKoSr0o6wPtvRWT9W9UKvGLwfJYO2WuahZRHOpEyL1ckyMGgMWh0UdpmaoFqKKD29WTomNEGA== - dependencies: - unicode-emoji-modifier-base "^1.0.0" - -slash@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/slash/-/slash-3.0.0.tgz#6539be870c165adbd5240220dbe361f1bc4d4634" - integrity 
sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q== - -source-map-support@0.5.13: - version "0.5.13" - resolved "https://registry.yarnpkg.com/source-map-support/-/source-map-support-0.5.13.tgz#31b24a9c2e73c2de85066c0feb7d44767ed52932" - integrity sha512-SHSKFHadjVA5oR4PPqhtAVdcBWwRYVd6g6cAXnIbRiIwc2EhPrTuKUBdSLvlEKyIP3GCf89fltvcZiP9MMFA1w== - dependencies: - buffer-from "^1.0.0" - source-map "^0.6.0" - -source-map@^0.6.0, source-map@^0.6.1: - version "0.6.1" - resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.6.1.tgz#74722af32e9614e9c287a8d0bbde48b5e2f1a263" - integrity sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g== - -sprintf-js@~1.0.2: - version "1.0.3" - resolved "https://registry.yarnpkg.com/sprintf-js/-/sprintf-js-1.0.3.tgz#04e6926f662895354f3dd015203633b857297e2c" - integrity sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g== - -stack-utils@^2.0.3: - version "2.0.6" - resolved "https://registry.yarnpkg.com/stack-utils/-/stack-utils-2.0.6.tgz#aaf0748169c02fc33c8232abccf933f54a1cc34f" - integrity sha512-XlkWvfIm6RmsWtNJx+uqtKLS8eqFbxUg0ZzLXqY0caEy9l7hruX8IpiDnjsLavoBgqCCR71TqWO8MaXYheJ3RQ== - dependencies: - escape-string-regexp "^2.0.0" - -string-length@^4.0.1: - version "4.0.2" - resolved "https://registry.yarnpkg.com/string-length/-/string-length-4.0.2.tgz#a8a8dc7bd5c1a82b9b3c8b87e125f66871b6e57a" - integrity sha512-+l6rNN5fYHNhZZy41RXsYptCjA2Igmq4EG7kZAYFQI1E1VTXarr6ZPXBg6eq7Y6eK4FEhY6AJlyuFIb/v/S0VQ== - dependencies: - char-regex "^1.0.2" - strip-ansi "^6.0.0" - -string-to-stream@^3.0.1: - version "3.0.1" - resolved "https://registry.yarnpkg.com/string-to-stream/-/string-to-stream-3.0.1.tgz#480e6fb4d5476d31cb2221f75307a5dcb6638a42" - integrity sha512-Hl092MV3USJuUCC6mfl9sPzGloA3K5VwdIeJjYIkXY/8K+mUvaeEabWJgArp+xXrsWxCajeT2pc4axbVhIZJyg== - dependencies: - readable-stream "^3.4.0" - -string-width@^4.1.0, string-width@^4.2.0, string-width@^4.2.3: - version "4.2.3" - resolved "https://registry.yarnpkg.com/string-width/-/string-width-4.2.3.tgz#269c7117d27b05ad2e536830a8ec895ef9c6d010" - integrity sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g== - dependencies: - emoji-regex "^8.0.0" - is-fullwidth-code-point "^3.0.0" - strip-ansi "^6.0.1" - -string_decoder@^1.1.1: - version "1.3.0" - resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.3.0.tgz#42f114594a46cf1a8e30b0a84f56c78c3edac21e" - integrity sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA== - dependencies: - safe-buffer "~5.2.0" - -strip-ansi@^6.0.0, strip-ansi@^6.0.1: - version "6.0.1" - resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-6.0.1.tgz#9e26c63d30f53443e9489495b2105d37b67a85d9" - integrity sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A== - dependencies: - ansi-regex "^5.0.1" - -strip-bom@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/strip-bom/-/strip-bom-3.0.0.tgz#2334c18e9c759f7bdd56fdef7e9ae3d588e68ed3" - integrity sha512-vavAMRXOgBVNF6nyEEmL3DBK19iRpDcoIwW+swQ+CbGiu7lju6t+JklA1MHweoWtadgt4ISVUsXLyDq34ddcwA== - -strip-bom@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/strip-bom/-/strip-bom-4.0.0.tgz#9c3505c1db45bcedca3d9cf7a16f5c5aa3901878" - integrity sha512-3xurFv5tEgii33Zi8Jtp55wEIILR9eh34FAW00PZf+JnSsTmV/ioewSgQl97JHvgjoRGwPShsWm+IdrxB35d0w== - 
-strip-final-newline@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/strip-final-newline/-/strip-final-newline-2.0.0.tgz#89b852fb2fcbe936f6f4b3187afb0a12c1ab58ad" - integrity sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA== - -strip-json-comments@^3.1.1: - version "3.1.1" - resolved "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-3.1.1.tgz#31f1281b3832630434831c310c01cccda8cbe006" - integrity sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig== - -superstruct@^1.0.4: - version "1.0.4" - resolved "https://registry.yarnpkg.com/superstruct/-/superstruct-1.0.4.tgz#0adb99a7578bd2f1c526220da6571b2d485d91ca" - integrity sha512-7JpaAoX2NGyoFlI9NBh66BQXGONc+uE+MRS5i2iOBKuS4e+ccgMDjATgZldkah+33DakBxDHiss9kvUcGAO8UQ== - -supports-color@^5.3.0: - version "5.5.0" - resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-5.5.0.tgz#e2e69a44ac8772f78a1ec0b35b689df6530efc8f" - integrity sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow== - dependencies: - has-flag "^3.0.0" - -supports-color@^7.0.0, supports-color@^7.1.0: - version "7.2.0" - resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-7.2.0.tgz#1b7dcdcb32b8138801b3e478ba6a51caa89648da" - integrity sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw== - dependencies: - has-flag "^4.0.0" - -supports-color@^8.0.0: - version "8.1.1" - resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-8.1.1.tgz#cd6fc17e28500cff56c1b86c0a7fd4a54a73005c" - integrity sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q== - dependencies: - has-flag "^4.0.0" - -supports-hyperlinks@^3.1.0: - version "3.1.0" - resolved "https://registry.yarnpkg.com/supports-hyperlinks/-/supports-hyperlinks-3.1.0.tgz#b56150ff0173baacc15f21956450b61f2b18d3ac" - integrity sha512-2rn0BZ+/f7puLOHZm1HOJfwBggfaHXUpPUSSG/SWM4TWp5KCfmNYwnC3hruy2rZlMnmWZ+QAGpZfchu3f3695A== - dependencies: - has-flag "^4.0.0" - supports-color "^7.0.0" - -supports-preserve-symlinks-flag@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz#6eda4bd344a3c94aea376d4cc31bc77311039e09" - integrity sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w== - -synckit@^0.11.7: - version "0.11.8" - resolved "https://registry.yarnpkg.com/synckit/-/synckit-0.11.8.tgz#b2aaae998a4ef47ded60773ad06e7cb821f55457" - integrity sha512-+XZ+r1XGIJGeQk3VvXhT6xx/VpbHsRzsTkGgF6E5RX9TTXD0118l87puaEBZ566FhqblC6U0d4XnubznJDm30A== - dependencies: - "@pkgr/core" "^0.2.4" - -test-exclude@^6.0.0: - version "6.0.0" - resolved "https://registry.yarnpkg.com/test-exclude/-/test-exclude-6.0.0.tgz#04a8698661d805ea6fa293b6cb9e63ac044ef15e" - integrity sha512-cAGWPIyOHU6zlmg88jwm7VRyXnMN7iV68OGAbYDk/Mh/xC/pzVPlQtY6ngoIH/5/tciuhGfvESU8GrHrcxD56w== - dependencies: - "@istanbuljs/schema" "^0.1.2" - glob "^7.1.4" - minimatch "^3.0.4" - -thenify-all@^1.0.0: - version "1.6.0" - resolved "https://registry.yarnpkg.com/thenify-all/-/thenify-all-1.6.0.tgz#1a1918d402d8fc3f98fbf234db0bcc8cc10e9726" - integrity sha512-RNxQH/qI8/t3thXJDwcstUO4zeqo64+Uy/+sNVRBx4Xn2OX+OZ9oP+iJnNFqplFra2ZUVeKCSa2oVWi3T4uVmA== - dependencies: - thenify ">= 3.1.0 < 4" - -"thenify@>= 3.1.0 < 4": - version "3.3.1" - resolved 
"https://registry.yarnpkg.com/thenify/-/thenify-3.3.1.tgz#8932e686a4066038a016dd9e2ca46add9838a95f" - integrity sha512-RVZSIV5IG10Hk3enotrhvz0T9em6cyHBLkH/YAZuKqd8hRkKhSfCGIcP2KUY0EPxndzANBmNllzWPwak+bheSw== - dependencies: - any-promise "^1.0.0" - -tmpl@1.0.5: - version "1.0.5" - resolved "https://registry.yarnpkg.com/tmpl/-/tmpl-1.0.5.tgz#8683e0b902bb9c20c4f726e3c0b69f36518c07cc" - integrity sha512-3f0uOEAQwIqGuWW2MVzYg8fV/QNnc/IpuJNG837rLuczAaLVHslWHZQj4IGiEl5Hs3kkbhwL9Ab7Hrsmuj+Smw== - -to-fast-properties@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/to-fast-properties/-/to-fast-properties-2.0.0.tgz#dc5e698cbd079265bc73e0377681a4e4e83f616e" - integrity sha512-/OaKK0xYrs3DmxRYqL/yDc+FxFUVYhDlXMhRmv3z915w2HF1tnN1omB354j8VUGO/hbRzyD6Y3sA7v7GS/ceog== - -to-regex-range@^5.0.1: - version "5.0.1" - resolved "https://registry.yarnpkg.com/to-regex-range/-/to-regex-range-5.0.1.tgz#1648c44aae7c8d988a326018ed72f5b4dd0392e4" - integrity sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ== - dependencies: - is-number "^7.0.0" - -ts-api-utils@^2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/ts-api-utils/-/ts-api-utils-2.0.1.tgz#660729385b625b939aaa58054f45c058f33f10cd" - integrity sha512-dnlgjFSVetynI8nzgJ+qF62efpglpWRk8isUEWZGWlJYySCTD6aKvbUDu+zbPeDakk3bg5H4XpitHukgfL1m9w== - -ts-jest@^29.1.0: - version "29.1.1" - resolved "https://registry.yarnpkg.com/ts-jest/-/ts-jest-29.1.1.tgz#f58fe62c63caf7bfcc5cc6472082f79180f0815b" - integrity sha512-D6xjnnbP17cC85nliwGiL+tpoKN0StpgE0TeOjXQTU6MVCfsB4v7aW05CgQ/1OywGb0x/oy9hHFnN+sczTiRaA== - dependencies: - bs-logger "0.x" - fast-json-stable-stringify "2.x" - jest-util "^29.0.0" - json5 "^2.2.3" - lodash.memoize "4.x" - make-error "1.x" - semver "^7.5.3" - yargs-parser "^21.0.1" - -ts-node@^10.5.0: - version "10.7.0" - resolved "https://registry.yarnpkg.com/ts-node/-/ts-node-10.7.0.tgz#35d503d0fab3e2baa672a0e94f4b40653c2463f5" - integrity sha512-TbIGS4xgJoX2i3do417KSaep1uRAW/Lu+WAL2doDHC0D6ummjirVOXU5/7aiZotbQ5p1Zp9tP7U6cYhA0O7M8A== - dependencies: - "@cspotcode/source-map-support" "0.7.0" - "@tsconfig/node10" "^1.0.7" - "@tsconfig/node12" "^1.0.7" - "@tsconfig/node14" "^1.0.0" - "@tsconfig/node16" "^1.0.2" - acorn "^8.4.1" - acorn-walk "^8.1.1" - arg "^4.1.0" - create-require "^1.1.0" - diff "^4.0.1" - make-error "^1.1.1" - v8-compile-cache-lib "^3.0.0" - yn "3.1.1" - -"tsc-multi@https://github.com/stainless-api/tsc-multi/releases/download/v1.1.9/tsc-multi.tgz": - version "1.1.9" - resolved "https://github.com/stainless-api/tsc-multi/releases/download/v1.1.9/tsc-multi.tgz#777f6f5d9e26bf0e94e5170990dd3a841d6707cd" - dependencies: - debug "^4.3.7" - fast-glob "^3.3.2" - get-stdin "^8.0.0" - p-all "^3.0.0" - picocolors "^1.1.1" - signal-exit "^3.0.7" - string-to-stream "^3.0.1" - superstruct "^1.0.4" - tslib "^2.8.1" - yargs "^17.7.2" - -tsconfig-paths@^4.0.0: - version "4.2.0" - resolved "https://registry.yarnpkg.com/tsconfig-paths/-/tsconfig-paths-4.2.0.tgz#ef78e19039133446d244beac0fd6a1632e2d107c" - integrity sha512-NoZ4roiN7LnbKn9QqE1amc9DJfzvZXxF4xDavcOWt1BPkdx+m+0gJuPM+S0vCe7zTJMYUP0R8pO2XMr+Y8oLIg== - dependencies: - json5 "^2.2.2" - minimist "^1.2.6" - strip-bom "^3.0.0" - -tslib@^2.8.1: - version "2.8.1" - resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.8.1.tgz#612efe4ed235d567e8aba5f2a5fab70280ade83f" - integrity sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w== - -type-check@^0.4.0, type-check@~0.4.0: - version "0.4.0" 
- resolved "https://registry.yarnpkg.com/type-check/-/type-check-0.4.0.tgz#07b8203bfa7056c0657050e3ccd2c37730bab8f1" - integrity sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew== - dependencies: - prelude-ls "^1.2.1" - -type-detect@4.0.8: - version "4.0.8" - resolved "https://registry.yarnpkg.com/type-detect/-/type-detect-4.0.8.tgz#7646fb5f18871cfbb7749e69bd39a6388eb7450c" - integrity sha512-0fr/mIH1dlO+x7TlcMy+bIDqKPsw/70tVyeHW787goQjhmqaZe10uwLujubK9q9Lg6Fiho1KUKDYz0Z7k7g5/g== - -type-fest@^0.21.3: - version "0.21.3" - resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.21.3.tgz#d260a24b0198436e133fa26a524a6d65fa3b2e37" - integrity sha512-t0rzBq87m3fVcduHDUFhKmyyX+9eo6WQjZvf51Ea/M0Q7+T374Jp1aUiyUl0GKxp8M/OETVHSDvmkyPgvX+X2w== - -typescript-eslint@8.31.1: - version "8.31.1" - resolved "https://registry.yarnpkg.com/typescript-eslint/-/typescript-eslint-8.31.1.tgz#b77ab1e48ced2daab9225ff94bab54391a4af69b" - integrity sha512-j6DsEotD/fH39qKzXTQRwYYWlt7D+0HmfpOK+DVhwJOFLcdmn92hq3mBb7HlKJHbjjI/gTOqEcc9d6JfpFf/VA== - dependencies: - "@typescript-eslint/eslint-plugin" "8.31.1" - "@typescript-eslint/parser" "8.31.1" - "@typescript-eslint/utils" "8.31.1" - -typescript@5.6.1-rc: - version "5.6.1-rc" - resolved "https://registry.yarnpkg.com/typescript/-/typescript-5.6.1-rc.tgz#d5e4d7d8170174fed607b74cc32aba3d77018e02" - integrity sha512-E3b2+1zEFu84jB0YQi9BORDjz9+jGbwwy1Zi3G0LUNw7a7cePUrHMRNy8aPh53nXpkFGVHSxIZo5vKTfYaFiBQ== - -typescript@5.8.3: - version "5.8.3" - resolved "https://registry.yarnpkg.com/typescript/-/typescript-5.8.3.tgz#92f8a3e5e3cf497356f4178c34cd65a7f5e8440e" - integrity sha512-p1diW6TqL9L07nNxvRMM7hMMw4c5XOo/1ibL4aAIGmSAt9slTE1Xgw5KWuof2uTOvCg9BY7ZRi+GaF+7sfgPeQ== - -undici-types@~5.26.4: - version "5.26.5" - resolved "https://registry.yarnpkg.com/undici-types/-/undici-types-5.26.5.tgz#bcd539893d00b56e964fd2657a4866b221a65617" - integrity sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA== - -undici-types@~6.21.0: - version "6.21.0" - resolved "https://registry.yarnpkg.com/undici-types/-/undici-types-6.21.0.tgz#691d00af3909be93a7faa13be61b3a5b50ef12cb" - integrity sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ== - -unicode-emoji-modifier-base@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/unicode-emoji-modifier-base/-/unicode-emoji-modifier-base-1.0.0.tgz#dbbd5b54ba30f287e2a8d5a249da6c0cef369459" - integrity sha512-yLSH4py7oFH3oG/9K+XWrz1pSi3dfUrWEnInbxMfArOfc1+33BlGPQtLsOYwvdMy11AwUBetYuaRxSPqgkq+8g== - -update-browserslist-db@^1.0.13: - version "1.0.13" - resolved "https://registry.yarnpkg.com/update-browserslist-db/-/update-browserslist-db-1.0.13.tgz#3c5e4f5c083661bd38ef64b6328c26ed6c8248c4" - integrity sha512-xebP81SNcPuNpPP3uzeW1NYXxI3rxyJzF3pD6sH4jE7o/IX+WtSpwnVU+qIsDPyk0d3hmFQ7mjqc6AtV604hbg== - dependencies: - escalade "^3.1.1" - picocolors "^1.0.0" - -uri-js@^4.2.2: - version "4.4.1" - resolved "https://registry.yarnpkg.com/uri-js/-/uri-js-4.4.1.tgz#9b1a52595225859e55f669d928f88c6c57f2a77e" - integrity sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg== - dependencies: - punycode "^2.1.0" - -util-deprecate@^1.0.1: - version "1.0.2" - resolved "https://registry.yarnpkg.com/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf" - integrity sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw== - 
-v8-compile-cache-lib@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/v8-compile-cache-lib/-/v8-compile-cache-lib-3.0.0.tgz#0582bcb1c74f3a2ee46487ceecf372e46bce53e8" - integrity sha512-mpSYqfsFvASnSn5qMiwrr4VKfumbPyONLCOPmsR3A6pTY/r0+tSaVbgPWSAIuzbk3lCTa+FForeTiO+wBQGkjA== - -v8-to-istanbul@^9.0.1: - version "9.2.0" - resolved "https://registry.yarnpkg.com/v8-to-istanbul/-/v8-to-istanbul-9.2.0.tgz#2ed7644a245cddd83d4e087b9b33b3e62dfd10ad" - integrity sha512-/EH/sDgxU2eGxajKdwLCDmQ4FWq+kpi3uCmBGpw1xJtnAxEjlD8j8PEiGWpCIMIs3ciNAgH0d3TTJiUkYzyZjA== - dependencies: - "@jridgewell/trace-mapping" "^0.3.12" - "@types/istanbul-lib-coverage" "^2.0.1" - convert-source-map "^2.0.0" - -validate-npm-package-name@^5.0.0: - version "5.0.1" - resolved "https://registry.yarnpkg.com/validate-npm-package-name/-/validate-npm-package-name-5.0.1.tgz#a316573e9b49f3ccd90dbb6eb52b3f06c6d604e8" - integrity sha512-OljLrQ9SQdOUqTaQxqL5dEfZWrXExyyWsozYlAWFawPVNuD83igl7uJD2RTkNMbniIYgt8l81eCJGIdQF7avLQ== - -walker@^1.0.8: - version "1.0.8" - resolved "https://registry.yarnpkg.com/walker/-/walker-1.0.8.tgz#bd498db477afe573dc04185f011d3ab8a8d7653f" - integrity sha512-ts/8E8l5b7kY0vlWLewOkDXMmPdLcVV4GmOQLyxuSswIJsweeFZtAsMF7k1Nszz+TYBQrlYRmzOnr398y1JemQ== - dependencies: - makeerror "1.0.12" - -which@^2.0.1: - version "2.0.2" - resolved "https://registry.yarnpkg.com/which/-/which-2.0.2.tgz#7c6a8dd0a636a0327e10b59c9286eee93f3f51b1" - integrity sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA== - dependencies: - isexe "^2.0.0" - -wrap-ansi@^7.0.0: - version "7.0.0" - resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-7.0.0.tgz#67e145cff510a6a6984bdf1152911d69d2eb9e43" - integrity sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q== - dependencies: - ansi-styles "^4.0.0" - string-width "^4.1.0" - strip-ansi "^6.0.0" - -wrappy@1: - version "1.0.2" - resolved "https://registry.yarnpkg.com/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f" - integrity sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ== - -write-file-atomic@^4.0.2: - version "4.0.2" - resolved "https://registry.yarnpkg.com/write-file-atomic/-/write-file-atomic-4.0.2.tgz#a9df01ae5b77858a027fd2e80768ee433555fcfd" - integrity sha512-7KxauUdBmSdWnmpaGFg+ppNjKF8uNLry8LyzjauQDOVONfFLNKrKvQOxZ/VuTIcS/gge/YNahf5RIIQWTSarlg== - dependencies: - imurmurhash "^0.1.4" - signal-exit "^3.0.7" - -y18n@^5.0.5: - version "5.0.8" - resolved "https://registry.yarnpkg.com/y18n/-/y18n-5.0.8.tgz#7f4934d0f7ca8c56f95314939ddcd2dd91ce1d55" - integrity sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA== - -yallist@^3.0.2: - version "3.1.1" - resolved "https://registry.yarnpkg.com/yallist/-/yallist-3.1.1.tgz#dbb7daf9bfd8bac9ab45ebf602b8cbad0d5d08fd" - integrity sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g== - -yallist@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/yallist/-/yallist-4.0.0.tgz#9bb92790d9c0effec63be73519e11a35019a3a72" - integrity sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A== - -yargs-parser@^20.2.2: - version "20.2.9" - resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-20.2.9.tgz#2eb7dc3b0289718fc295f362753845c41a0c94ee" - integrity 
sha512-y11nGElTIV+CT3Zv9t7VKl+Q3hTQoT9a1Qzezhhl6Rp21gJ/IVTW7Z3y9EWXhuUBC2Shnf+DX0antecpAwSP8w== - -yargs-parser@^21.0.1, yargs-parser@^21.1.1: - version "21.1.1" - resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-21.1.1.tgz#9096bceebf990d21bb31fa9516e0ede294a77d35" - integrity sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw== - -yargs@^16.0.0: - version "16.2.0" - resolved "https://registry.yarnpkg.com/yargs/-/yargs-16.2.0.tgz#1c82bf0f6b6a66eafce7ef30e376f49a12477f66" - integrity sha512-D1mvvtDG0L5ft/jGWkLpG1+m0eQxOfaBvTNELraWj22wSVUMWxZUvYgJYcKh6jGGIkJFhH4IZPQhR4TKpc8mBw== - dependencies: - cliui "^7.0.2" - escalade "^3.1.1" - get-caller-file "^2.0.5" - require-directory "^2.1.1" - string-width "^4.2.0" - y18n "^5.0.5" - yargs-parser "^20.2.2" - -yargs@^17.3.1, yargs@^17.7.2: - version "17.7.2" - resolved "https://registry.yarnpkg.com/yargs/-/yargs-17.7.2.tgz#991df39aca675a192b816e1e0363f9d75d2aa269" - integrity sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w== - dependencies: - cliui "^8.0.1" - escalade "^3.1.1" - get-caller-file "^2.0.5" - require-directory "^2.1.1" - string-width "^4.2.3" - y18n "^5.0.5" - yargs-parser "^21.1.1" - -yn@3.1.1: - version "3.1.1" - resolved "https://registry.yarnpkg.com/yn/-/yn-3.1.1.tgz#1e87401a09d767c1d5eab26a6e4c185182d2eb50" - integrity sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q== - -yocto-queue@^0.1.0: - version "0.1.0" - resolved "https://registry.yarnpkg.com/yocto-queue/-/yocto-queue-0.1.0.tgz#0294eb3dee05028d31ee1a5fa2c556a6aaf10a1b" - integrity sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==