diff --git a/.speakeasy/gen.lock b/.speakeasy/gen.lock index 2714d6c3..2af5a009 100644 --- a/.speakeasy/gen.lock +++ b/.speakeasy/gen.lock @@ -5,8 +5,8 @@ management: docVersion: 1.0.0 speakeasyVersion: 1.568.2 generationVersion: 2.634.2 - releaseVersion: 1.9.7 - configChecksum: 12add5d16b9129632055a59af01aa628 + releaseVersion: 1.9.8 + configChecksum: d7b7d0bf04e94ae161a650b203bc54df repoURL: https://github.com/mistralai/client-python.git installationURL: https://github.com/mistralai/client-python.git published: true diff --git a/.speakeasy/gen.yaml b/.speakeasy/gen.yaml index 90a0d865..fd56d757 100644 --- a/.speakeasy/gen.yaml +++ b/.speakeasy/gen.yaml @@ -21,7 +21,7 @@ generation: generateNewTests: false skipResponseBodyAssertions: false python: - version: 1.9.7 + version: 1.9.8 additionalDependencies: dev: pytest: ^8.2.2 diff --git a/.speakeasy/workflow.lock b/.speakeasy/workflow.lock index 1b1d6fed..4caed8b7 100644 --- a/.speakeasy/workflow.lock +++ b/.speakeasy/workflow.lock @@ -2,8 +2,8 @@ speakeasyVersion: 1.568.2 sources: mistral-azure-source: sourceNamespace: mistral-openapi-azure - sourceRevisionDigest: sha256:c5931a7e0cc2db844149d71db57dfc2178665f0400bc26c90ee113795ea2872f - sourceBlobDigest: sha256:504fff788fdac8d781e33d85e3a04d35f6d9f7a3ef5ed40da8b4567074e94f03 + sourceRevisionDigest: sha256:670c460702ec74f7077491464a6dc5ee9d873969c80e812c48dbf4deb160e470 + sourceBlobDigest: sha256:5a3ebfa4cb00a015bb7bb03ec7442fc7e0b9c17ca66ab35d3045290b2ad87eac tags: - latest mistral-google-cloud-source: @@ -18,15 +18,15 @@ sources: sourceBlobDigest: sha256:64fa97f0836a2441e32bc255ced698234c147c790775988102f17b493f0da87d tags: - latest - - speakeasy-sdk-regen-1755678497 + - speakeasy-sdk-regen-1756133663 targets: mistralai-azure-sdk: source: mistral-azure-source sourceNamespace: mistral-openapi-azure - sourceRevisionDigest: sha256:c5931a7e0cc2db844149d71db57dfc2178665f0400bc26c90ee113795ea2872f - sourceBlobDigest: 
sha256:504fff788fdac8d781e33d85e3a04d35f6d9f7a3ef5ed40da8b4567074e94f03 + sourceRevisionDigest: sha256:670c460702ec74f7077491464a6dc5ee9d873969c80e812c48dbf4deb160e470 + sourceBlobDigest: sha256:5a3ebfa4cb00a015bb7bb03ec7442fc7e0b9c17ca66ab35d3045290b2ad87eac codeSamplesNamespace: mistral-openapi-azure-code-samples - codeSamplesRevisionDigest: sha256:e242a7fc42e44d2bbc8e5637d4a6455da7fb3d0307dc275ee4c64867f5c4be55 + codeSamplesRevisionDigest: sha256:a4ace4b17dee92b180a2fede7742bd93fa1a83a9f96e4f61531289cafc50f6ad mistralai-gcp-sdk: source: mistral-google-cloud-source sourceNamespace: mistral-openapi-google-cloud @@ -40,7 +40,7 @@ targets: sourceRevisionDigest: sha256:fcdf40207fa983a6ad7c0346f499d2b87ef2f1381a48e791a9a485bd40525ff8 sourceBlobDigest: sha256:64fa97f0836a2441e32bc255ced698234c147c790775988102f17b493f0da87d codeSamplesNamespace: mistral-openapi-code-samples - codeSamplesRevisionDigest: sha256:d53b8d8fa0068fc51569bdde2e319d0d2abdb019c80180d926a1a1a1f7fb56b3 + codeSamplesRevisionDigest: sha256:f215393d076966760f8b24cb3db48d6dd9c2ba2e06daa7964860bab4b09f51f2 workflow: workflowVersion: 1.0.0 speakeasyVersion: 1.568.2 diff --git a/RELEASES.md b/RELEASES.md index 86c423ef..2e99de4a 100644 --- a/RELEASES.md +++ b/RELEASES.md @@ -288,4 +288,14 @@ Based on: ### Generated - [python v1.9.7] . ### Releases -- [PyPI v1.9.7] https://pypi.org/project/mistralai/1.9.7 - . \ No newline at end of file +- [PyPI v1.9.7] https://pypi.org/project/mistralai/1.9.7 - . + +## 2025-08-25 14:54:06 +### Changes +Based on: +- OpenAPI Doc +- Speakeasy CLI 1.568.2 (2.634.2) https://github.com/speakeasy-api/speakeasy +### Generated +- [python v1.9.8] . +### Releases +- [PyPI v1.9.8] https://pypi.org/project/mistralai/1.9.8 - . 
\ No newline at end of file diff --git a/packages/mistralai_azure/.gitignore b/packages/mistralai_azure/.gitignore index 5a82b069..f2ea8c39 100644 --- a/packages/mistralai_azure/.gitignore +++ b/packages/mistralai_azure/.gitignore @@ -1,3 +1,6 @@ +**/__pycache__/ +**/.speakeasy/temp/ +**/.speakeasy/logs/ .speakeasy/reports README-PYPI.md .venv/ diff --git a/packages/mistralai_azure/.speakeasy/gen.lock b/packages/mistralai_azure/.speakeasy/gen.lock index b7d6f3ba..ef80e828 100644 --- a/packages/mistralai_azure/.speakeasy/gen.lock +++ b/packages/mistralai_azure/.speakeasy/gen.lock @@ -1,18 +1,19 @@ lockVersion: 2.0.0 id: dc40fa48-2c4d-46ad-ac8b-270749770f34 management: - docChecksum: 574e96caec9a63dbe3f39d646830f2c2 - docVersion: 0.0.2 - speakeasyVersion: 1.517.3 - generationVersion: 2.548.6 + docChecksum: 87653f040f5b36c90e066870f34c478e + docVersion: 1.0.0 + speakeasyVersion: 1.568.2 + generationVersion: 2.634.2 releaseVersion: 1.6.0 - configChecksum: 3a4d9b204b5731c461ed7279710d5ed6 + configChecksum: 0b604304465a25f89acca310710262d1 published: true features: python: additionalDependencies: 1.0.0 + additionalProperties: 1.0.1 constsAndDefaults: 1.0.5 - core: 5.12.3 + core: 5.19.3 defaultEnabledRetries: 0.2.0 enumUnions: 0.1.0 envVarSecurityUsage: 0.3.2 @@ -21,14 +22,14 @@ features: globalSecurity: 3.0.3 globalSecurityCallbacks: 1.0.0 globalSecurityFlattening: 1.0.0 - globalServerURLs: 3.1.0 + globalServerURLs: 3.1.1 methodArguments: 1.0.2 nameOverrides: 3.0.1 nullables: 1.0.1 - openEnums: 1.0.0 + openEnums: 1.0.1 responseFormat: 1.0.1 retries: 3.0.2 - sdkHooks: 1.0.1 + sdkHooks: 1.1.0 serverEvents: 1.0.7 serverEventsSentinels: 0.1.0 serverIDs: 3.0.0 @@ -56,6 +57,10 @@ generatedFiles: - docs/models/content.md - docs/models/contentchunk.md - docs/models/deltamessage.md + - docs/models/document.md + - docs/models/documenturlchunk.md + - docs/models/documenturlchunktype.md + - docs/models/filechunk.md - docs/models/finishreason.md - docs/models/function.md - 
docs/models/functioncall.md @@ -68,6 +73,13 @@ generatedFiles: - docs/models/jsonschema.md - docs/models/loc.md - docs/models/messages.md + - docs/models/mistralpromptmode.md + - docs/models/ocrimageobject.md + - docs/models/ocrpagedimensions.md + - docs/models/ocrpageobject.md + - docs/models/ocrrequest.md + - docs/models/ocrresponse.md + - docs/models/ocrusageinfo.md - docs/models/prediction.md - docs/models/referencechunk.md - docs/models/referencechunktype.md @@ -98,7 +110,6 @@ generatedFiles: - py.typed - pylintrc - pyproject.toml - - scripts/prepare_readme.py - scripts/publish.sh - src/mistralai_azure/__init__.py - src/mistralai_azure/_hooks/__init__.py @@ -119,6 +130,8 @@ generatedFiles: - src/mistralai_azure/models/completionresponsestreamchoice.py - src/mistralai_azure/models/contentchunk.py - src/mistralai_azure/models/deltamessage.py + - src/mistralai_azure/models/documenturlchunk.py + - src/mistralai_azure/models/filechunk.py - src/mistralai_azure/models/function.py - src/mistralai_azure/models/functioncall.py - src/mistralai_azure/models/functionname.py @@ -126,6 +139,13 @@ generatedFiles: - src/mistralai_azure/models/imageurl.py - src/mistralai_azure/models/imageurlchunk.py - src/mistralai_azure/models/jsonschema.py + - src/mistralai_azure/models/mistralpromptmode.py + - src/mistralai_azure/models/ocrimageobject.py + - src/mistralai_azure/models/ocrpagedimensions.py + - src/mistralai_azure/models/ocrpageobject.py + - src/mistralai_azure/models/ocrrequest.py + - src/mistralai_azure/models/ocrresponse.py + - src/mistralai_azure/models/ocrusageinfo.py - src/mistralai_azure/models/prediction.py - src/mistralai_azure/models/referencechunk.py - src/mistralai_azure/models/responseformat.py @@ -143,13 +163,14 @@ generatedFiles: - src/mistralai_azure/models/usageinfo.py - src/mistralai_azure/models/usermessage.py - src/mistralai_azure/models/validationerror.py + - src/mistralai_azure/ocr.py - src/mistralai_azure/py.typed - - src/mistralai_azure/sdk.py - 
src/mistralai_azure/sdkconfiguration.py - src/mistralai_azure/types/__init__.py - src/mistralai_azure/types/basemodel.py - src/mistralai_azure/utils/__init__.py - src/mistralai_azure/utils/annotations.py + - src/mistralai_azure/utils/datetimes.py - src/mistralai_azure/utils/enums.py - src/mistralai_azure/utils/eventstreaming.py - src/mistralai_azure/utils/forms.py @@ -171,15 +192,23 @@ examples: responses: "422": application/json: {} - "200": {} chat_completion_v1_chat_completions_post: speakeasy-default-chat-completion-v1-chat-completions-post: requestBody: application/json: {"model": "azureai", "stream": false, "messages": [{"content": "Who is the best French painter? Answer in one short sentence.", "role": "user"}]} responses: "200": - application/json: {"id": "cmpl-e5cc70bb28c444948073e77776eb30ef", "object": "chat.completion", "model": "mistral-small-latest", "usage": {"prompt_tokens": 16, "completion_tokens": 34, "total_tokens": 50}, "created": 1702256327, "choices": [{"index": 0, "message": {"prefix": false, "role": "assistant"}, "finish_reason": "stop"}, {"index": 0, "message": {"prefix": false, "role": "assistant"}, "finish_reason": "stop"}, {"index": 0, "message": {"prefix": false, "role": "assistant"}, "finish_reason": "stop"}]} + application/json: {"id": "cmpl-e5cc70bb28c444948073e77776eb30ef", "object": "chat.completion", "model": "mistral-small-latest", "usage": {"prompt_tokens": 0, "completion_tokens": 0, "total_tokens": 0}, "created": 1702256327, "choices": []} "422": application/json: {} -examplesVersion: 1.0.0 + ocr_v1_ocr_post: + speakeasy-default-ocr-v1-ocr-post: + requestBody: + application/json: {"model": "CX-9", "document": {"document_url": "https://upset-labourer.net/", "type": "document_url"}} + responses: + "200": + application/json: {"pages": [], "model": "Golf", "usage_info": {"pages_processed": 944919}} + "422": + application/json: {} +examplesVersion: 1.0.2 generatedTests: {} diff --git a/packages/mistralai_azure/.speakeasy/gen.yaml 
b/packages/mistralai_azure/.speakeasy/gen.yaml index be4a1781..63e2da75 100644 --- a/packages/mistralai_azure/.speakeasy/gen.yaml +++ b/packages/mistralai_azure/.speakeasy/gen.yaml @@ -4,6 +4,7 @@ generation: maintainOpenAPIOrder: true usageSnippets: optionalPropertyRendering: withExample + sdkInitStyle: constructor useClassNamesForArrayFields: true fixes: nameResolutionDec2023: true @@ -11,9 +12,14 @@ generation: parameterOrderingFeb2024: true requestResponseComponentNamesFeb2024: true securityFeb2025: false + sharedErrorComponentsApr2025: false auth: oAuth2ClientCredentialsEnabled: true oAuth2PasswordEnabled: false + tests: + generateTests: true + generateNewTests: false + skipResponseBodyAssertions: false python: version: 1.6.0 additionalDependencies: @@ -22,6 +28,7 @@ python: pytest-asyncio: ^0.23.7 authors: - Mistral + baseErrorName: MistralAzureError clientServerStatusCodesAsErrors: true defaultErrorName: SDKError description: Python Client SDK for the Mistral AI API in Azure. @@ -43,8 +50,10 @@ python: inputModelSuffix: input maxMethodParams: 15 methodArguments: infer-optional-args + moduleName: "" outputModelSuffix: output packageName: mistralai_azure + pytestFilterWarnings: [] pytestTimeout: 0 responseFormat: flat templateVersion: v2 diff --git a/packages/mistralai_azure/docs/models/chatcompletionrequest.md b/packages/mistralai_azure/docs/models/chatcompletionrequest.md index a9a174fb..b0f05d37 100644 --- a/packages/mistralai_azure/docs/models/chatcompletionrequest.md +++ b/packages/mistralai_azure/docs/models/chatcompletionrequest.md @@ -5,7 +5,6 @@ | Field | Type | Required | Description | Example | | 
----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| `messages` | 
List[[models.ChatCompletionRequestMessages](../models/chatcompletionrequestmessages.md)] | :heavy_check_mark: | The prompt(s) to generate completions for, encoded as a list of dict with role and content. | [
{
"role": "user",
"content": "Who is the best French painter? Answer in one short sentence."
}
] | | `model` | *Optional[str]* | :heavy_minus_sign: | The ID of the model to use for this request. | azureai | | `temperature` | *OptionalNullable[float]* | :heavy_minus_sign: | What sampling temperature to use, we recommend between 0.0 and 0.7. Higher values like 0.7 will make the output more random, while lower values like 0.2 will make it more focused and deterministic. We generally recommend altering this or `top_p` but not both. The default value varies depending on the model you are targeting. Call the `/models` endpoint to retrieve the appropriate value. | | | `top_p` | *Optional[float]* | :heavy_minus_sign: | Nucleus sampling, where the model considers the results of the tokens with `top_p` probability mass. So 0.1 means only the tokens comprising the top 10% probability mass are considered. We generally recommend altering this or `temperature` but not both. | | @@ -13,6 +12,7 @@ | `stream` | *Optional[bool]* | :heavy_minus_sign: | Whether to stream back partial progress. If set, tokens will be sent as data-only server-side events as they become available, with the stream terminated by a data: [DONE] message. Otherwise, the server will hold the request open until the timeout or until completion, with the response containing the full result as JSON. | | | `stop` | [Optional[models.ChatCompletionRequestStop]](../models/chatcompletionrequeststop.md) | :heavy_minus_sign: | Stop generation if this token is detected. Or if one of these tokens is detected when providing an array | | | `random_seed` | *OptionalNullable[int]* | :heavy_minus_sign: | The seed to use for random sampling. If set, different calls will generate deterministic results. | | +| `messages` | List[[models.ChatCompletionRequestMessages](../models/chatcompletionrequestmessages.md)] | :heavy_check_mark: | The prompt(s) to generate completions for, encoded as a list of dict with role and content. | [
{
"role": "user",
"content": "Who is the best French painter? Answer in one short sentence."
}
] | | `response_format` | [Optional[models.ResponseFormat]](../models/responseformat.md) | :heavy_minus_sign: | N/A | | | `tools` | List[[models.Tool](../models/tool.md)] | :heavy_minus_sign: | N/A | | | `tool_choice` | [Optional[models.ChatCompletionRequestToolChoice]](../models/chatcompletionrequesttoolchoice.md) | :heavy_minus_sign: | N/A | | @@ -21,4 +21,5 @@ | `n` | *OptionalNullable[int]* | :heavy_minus_sign: | Number of completions to return for each request, input tokens are only billed once. | | | `prediction` | [Optional[models.Prediction]](../models/prediction.md) | :heavy_minus_sign: | N/A | | | `parallel_tool_calls` | *Optional[bool]* | :heavy_minus_sign: | N/A | | +| `prompt_mode` | [OptionalNullable[models.MistralPromptMode]](../models/mistralpromptmode.md) | :heavy_minus_sign: | Allows toggling between the reasoning mode and no system prompt. When set to `reasoning` the system prompt for reasoning models will be used. | | | `safe_prompt` | *Optional[bool]* | :heavy_minus_sign: | Whether to inject a safety prompt before all conversations. 
| | \ No newline at end of file diff --git a/packages/mistralai_azure/docs/models/chatcompletionresponse.md b/packages/mistralai_azure/docs/models/chatcompletionresponse.md index ad376158..a0465ffb 100644 --- a/packages/mistralai_azure/docs/models/chatcompletionresponse.md +++ b/packages/mistralai_azure/docs/models/chatcompletionresponse.md @@ -9,5 +9,5 @@ | `object` | *str* | :heavy_check_mark: | N/A | chat.completion | | `model` | *str* | :heavy_check_mark: | N/A | mistral-small-latest | | `usage` | [models.UsageInfo](../models/usageinfo.md) | :heavy_check_mark: | N/A | | -| `created` | *Optional[int]* | :heavy_minus_sign: | N/A | 1702256327 | -| `choices` | List[[models.ChatCompletionChoice](../models/chatcompletionchoice.md)] | :heavy_minus_sign: | N/A | | \ No newline at end of file +| `created` | *int* | :heavy_check_mark: | N/A | 1702256327 | +| `choices` | List[[models.ChatCompletionChoice](../models/chatcompletionchoice.md)] | :heavy_check_mark: | N/A | | \ No newline at end of file diff --git a/packages/mistralai_azure/docs/models/chatcompletionstreamrequest.md b/packages/mistralai_azure/docs/models/chatcompletionstreamrequest.md index b3e06e7a..90397dec 100644 --- a/packages/mistralai_azure/docs/models/chatcompletionstreamrequest.md +++ b/packages/mistralai_azure/docs/models/chatcompletionstreamrequest.md @@ -5,7 +5,6 @@ | Field | Type | Required | Description | Example | | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | 
----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| `messages` | List[[models.Messages](../models/messages.md)] | :heavy_check_mark: | The prompt(s) to generate completions for, encoded as a list of dict with role and content. | [
{
"role": "user",
"content": "Who is the best French painter? Answer in one short sentence."
}
] | | `model` | *Optional[str]* | :heavy_minus_sign: | The ID of the model to use for this request. | azureai | | `temperature` | *OptionalNullable[float]* | :heavy_minus_sign: | What sampling temperature to use, we recommend between 0.0 and 0.7. Higher values like 0.7 will make the output more random, while lower values like 0.2 will make it more focused and deterministic. We generally recommend altering this or `top_p` but not both. The default value varies depending on the model you are targeting. Call the `/models` endpoint to retrieve the appropriate value. | | | `top_p` | *Optional[float]* | :heavy_minus_sign: | Nucleus sampling, where the model considers the results of the tokens with `top_p` probability mass. So 0.1 means only the tokens comprising the top 10% probability mass are considered. We generally recommend altering this or `temperature` but not both. | | @@ -13,6 +12,7 @@ | `stream` | *Optional[bool]* | :heavy_minus_sign: | N/A | | | `stop` | [Optional[models.Stop]](../models/stop.md) | :heavy_minus_sign: | Stop generation if this token is detected. Or if one of these tokens is detected when providing an array | | | `random_seed` | *OptionalNullable[int]* | :heavy_minus_sign: | The seed to use for random sampling. If set, different calls will generate deterministic results. | | +| `messages` | List[[models.Messages](../models/messages.md)] | :heavy_check_mark: | The prompt(s) to generate completions for, encoded as a list of dict with role and content. | [
{
"role": "user",
"content": "Who is the best French painter? Answer in one short sentence."
}
] | | `response_format` | [Optional[models.ResponseFormat]](../models/responseformat.md) | :heavy_minus_sign: | N/A | | | `tools` | List[[models.Tool](../models/tool.md)] | :heavy_minus_sign: | N/A | | | `tool_choice` | [Optional[models.ChatCompletionStreamRequestToolChoice]](../models/chatcompletionstreamrequesttoolchoice.md) | :heavy_minus_sign: | N/A | | @@ -21,4 +21,5 @@ | `n` | *OptionalNullable[int]* | :heavy_minus_sign: | Number of completions to return for each request, input tokens are only billed once. | | | `prediction` | [Optional[models.Prediction]](../models/prediction.md) | :heavy_minus_sign: | N/A | | | `parallel_tool_calls` | *Optional[bool]* | :heavy_minus_sign: | N/A | | +| `prompt_mode` | [OptionalNullable[models.MistralPromptMode]](../models/mistralpromptmode.md) | :heavy_minus_sign: | Allows toggling between the reasoning mode and no system prompt. When set to `reasoning` the system prompt for reasoning models will be used. | | | `safe_prompt` | *Optional[bool]* | :heavy_minus_sign: | Whether to inject a safety prompt before all conversations. 
| | \ No newline at end of file diff --git a/packages/mistralai_azure/docs/models/completionchunk.md b/packages/mistralai_azure/docs/models/completionchunk.md index b8ae6a09..7f8ab5e6 100644 --- a/packages/mistralai_azure/docs/models/completionchunk.md +++ b/packages/mistralai_azure/docs/models/completionchunk.md @@ -6,8 +6,8 @@ | Field | Type | Required | Description | | ------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------ | | `id` | *str* | :heavy_check_mark: | N/A | -| `model` | *str* | :heavy_check_mark: | N/A | -| `choices` | List[[models.CompletionResponseStreamChoice](../models/completionresponsestreamchoice.md)] | :heavy_check_mark: | N/A | | `object` | *Optional[str]* | :heavy_minus_sign: | N/A | | `created` | *Optional[int]* | :heavy_minus_sign: | N/A | -| `usage` | [Optional[models.UsageInfo]](../models/usageinfo.md) | :heavy_minus_sign: | N/A | \ No newline at end of file +| `model` | *str* | :heavy_check_mark: | N/A | +| `usage` | [Optional[models.UsageInfo]](../models/usageinfo.md) | :heavy_minus_sign: | N/A | +| `choices` | List[[models.CompletionResponseStreamChoice](../models/completionresponsestreamchoice.md)] | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/packages/mistralai_azure/docs/models/document.md b/packages/mistralai_azure/docs/models/document.md new file mode 100644 index 00000000..509d43b7 --- /dev/null +++ b/packages/mistralai_azure/docs/models/document.md @@ -0,0 +1,25 @@ +# Document + +Document to run OCR on + + +## Supported Types + +### `models.FileChunk` + +```python +value: models.FileChunk = /* values here */ +``` + +### `models.DocumentURLChunk` + +```python +value: models.DocumentURLChunk = /* 
values here */ +``` + +### `models.ImageURLChunk` + +```python +value: models.ImageURLChunk = /* values here */ +``` + diff --git a/packages/mistralai_azure/docs/models/documenturlchunk.md b/packages/mistralai_azure/docs/models/documenturlchunk.md new file mode 100644 index 00000000..6c9a5b4d --- /dev/null +++ b/packages/mistralai_azure/docs/models/documenturlchunk.md @@ -0,0 +1,10 @@ +# DocumentURLChunk + + +## Fields + +| Field | Type | Required | Description | +| -------------------------------------------------------------------------- | -------------------------------------------------------------------------- | -------------------------------------------------------------------------- | -------------------------------------------------------------------------- | +| `document_url` | *str* | :heavy_check_mark: | N/A | +| `document_name` | *OptionalNullable[str]* | :heavy_minus_sign: | The filename of the document | +| `type` | [Optional[models.DocumentURLChunkType]](../models/documenturlchunktype.md) | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/packages/mistralai_azure/docs/models/documenturlchunktype.md b/packages/mistralai_azure/docs/models/documenturlchunktype.md new file mode 100644 index 00000000..32e1fa9e --- /dev/null +++ b/packages/mistralai_azure/docs/models/documenturlchunktype.md @@ -0,0 +1,8 @@ +# DocumentURLChunkType + + +## Values + +| Name | Value | +| -------------- | -------------- | +| `DOCUMENT_URL` | document_url | \ No newline at end of file diff --git a/packages/mistralai_azure/docs/models/filechunk.md b/packages/mistralai_azure/docs/models/filechunk.md new file mode 100644 index 00000000..18217114 --- /dev/null +++ b/packages/mistralai_azure/docs/models/filechunk.md @@ -0,0 +1,9 @@ +# FileChunk + + +## Fields + +| Field | Type | Required | Description | +| --------------------------- | --------------------------- | --------------------------- | --------------------------- | +| `type` | *Optional[Literal["file"]]* 
| :heavy_minus_sign: | N/A | +| `file_id` | *str* | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/packages/mistralai_azure/docs/models/function.md b/packages/mistralai_azure/docs/models/function.md index a166b7bb..b2bdb3fe 100644 --- a/packages/mistralai_azure/docs/models/function.md +++ b/packages/mistralai_azure/docs/models/function.md @@ -6,6 +6,6 @@ | Field | Type | Required | Description | | ------------------ | ------------------ | ------------------ | ------------------ | | `name` | *str* | :heavy_check_mark: | N/A | -| `parameters` | Dict[str, *Any*] | :heavy_check_mark: | N/A | | `description` | *Optional[str]* | :heavy_minus_sign: | N/A | -| `strict` | *Optional[bool]* | :heavy_minus_sign: | N/A | \ No newline at end of file +| `strict` | *Optional[bool]* | :heavy_minus_sign: | N/A | +| `parameters` | Dict[str, *Any*] | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/packages/mistralai_azure/docs/models/jsonschema.md b/packages/mistralai_azure/docs/models/jsonschema.md index ae387867..7ff7c070 100644 --- a/packages/mistralai_azure/docs/models/jsonschema.md +++ b/packages/mistralai_azure/docs/models/jsonschema.md @@ -6,6 +6,6 @@ | Field | Type | Required | Description | | ----------------------- | ----------------------- | ----------------------- | ----------------------- | | `name` | *str* | :heavy_check_mark: | N/A | -| `schema_definition` | Dict[str, *Any*] | :heavy_check_mark: | N/A | | `description` | *OptionalNullable[str]* | :heavy_minus_sign: | N/A | +| `schema_definition` | Dict[str, *Any*] | :heavy_check_mark: | N/A | | `strict` | *Optional[bool]* | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/packages/mistralai_azure/docs/models/mistralpromptmode.md b/packages/mistralai_azure/docs/models/mistralpromptmode.md new file mode 100644 index 00000000..7416e203 --- /dev/null +++ b/packages/mistralai_azure/docs/models/mistralpromptmode.md @@ -0,0 +1,8 @@ +# MistralPromptMode + + +## Values 
+ +| Name | Value | +| ----------- | ----------- | +| `REASONING` | reasoning | \ No newline at end of file diff --git a/packages/mistralai_azure/docs/models/ocrimageobject.md b/packages/mistralai_azure/docs/models/ocrimageobject.md new file mode 100644 index 00000000..3c0d5544 --- /dev/null +++ b/packages/mistralai_azure/docs/models/ocrimageobject.md @@ -0,0 +1,14 @@ +# OCRImageObject + + +## Fields + +| Field | Type | Required | Description | +| ---------------------------------------------------------- | ---------------------------------------------------------- | ---------------------------------------------------------- | ---------------------------------------------------------- | +| `id` | *str* | :heavy_check_mark: | Image ID for extracted image in a page | +| `top_left_x` | *Nullable[int]* | :heavy_check_mark: | X coordinate of top-left corner of the extracted image | +| `top_left_y` | *Nullable[int]* | :heavy_check_mark: | Y coordinate of top-left corner of the extracted image | +| `bottom_right_x` | *Nullable[int]* | :heavy_check_mark: | X coordinate of bottom-right corner of the extracted image | +| `bottom_right_y` | *Nullable[int]* | :heavy_check_mark: | Y coordinate of bottom-right corner of the extracted image | +| `image_base64` | *OptionalNullable[str]* | :heavy_minus_sign: | Base64 string of the extracted image | +| `image_annotation` | *OptionalNullable[str]* | :heavy_minus_sign: | Annotation of the extracted image in json str | \ No newline at end of file diff --git a/packages/mistralai_azure/docs/models/ocrpagedimensions.md b/packages/mistralai_azure/docs/models/ocrpagedimensions.md new file mode 100644 index 00000000..c93ca64d --- /dev/null +++ b/packages/mistralai_azure/docs/models/ocrpagedimensions.md @@ -0,0 +1,10 @@ +# OCRPageDimensions + + +## Fields + +| Field | Type | Required | Description | +| ------------------------------- | ------------------------------- | ------------------------------- | ------------------------------- | +| 
`dpi` | *int* | :heavy_check_mark: | Dots per inch of the page-image | +| `height` | *int* | :heavy_check_mark: | Height of the image in pixels | +| `width` | *int* | :heavy_check_mark: | Width of the image in pixels | \ No newline at end of file diff --git a/packages/mistralai_azure/docs/models/ocrpageobject.md b/packages/mistralai_azure/docs/models/ocrpageobject.md new file mode 100644 index 00000000..9db3bb77 --- /dev/null +++ b/packages/mistralai_azure/docs/models/ocrpageobject.md @@ -0,0 +1,11 @@ +# OCRPageObject + + +## Fields + +| Field | Type | Required | Description | +| -------------------------------------------------------------------- | -------------------------------------------------------------------- | -------------------------------------------------------------------- | -------------------------------------------------------------------- | +| `index` | *int* | :heavy_check_mark: | The page index in a pdf document starting from 0 | +| `markdown` | *str* | :heavy_check_mark: | The markdown string response of the page | +| `images` | List[[models.OCRImageObject](../models/ocrimageobject.md)] | :heavy_check_mark: | List of all extracted images in the page | +| `dimensions` | [Nullable[models.OCRPageDimensions]](../models/ocrpagedimensions.md) | :heavy_check_mark: | The dimensions of the PDF Page's screenshot image | \ No newline at end of file diff --git a/packages/mistralai_azure/docs/models/ocrrequest.md b/packages/mistralai_azure/docs/models/ocrrequest.md new file mode 100644 index 00000000..6a9c77ab --- /dev/null +++ b/packages/mistralai_azure/docs/models/ocrrequest.md @@ -0,0 +1,16 @@ +# OCRRequest + + +## Fields + +| Field | Type | Required | Description | +| ---------------------------------------------------------------------------------------------------------------------------------------------------------- | 
---------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `model` | *Nullable[str]* | :heavy_check_mark: | N/A | +| `id` | *Optional[str]* | :heavy_minus_sign: | N/A | +| `document` | [models.Document](../models/document.md) | :heavy_check_mark: | Document to run OCR on | +| `pages` | List[*int*] | :heavy_minus_sign: | Specific pages user wants to process in various formats: single number, range, or list of both. Starts from 0 | +| `include_image_base64` | *OptionalNullable[bool]* | :heavy_minus_sign: | Include image URLs in response | +| `image_limit` | *OptionalNullable[int]* | :heavy_minus_sign: | Max images to extract | +| `image_min_size` | *OptionalNullable[int]* | :heavy_minus_sign: | Minimum height and width of image to extract | +| `bbox_annotation_format` | [OptionalNullable[models.ResponseFormat]](../models/responseformat.md) | :heavy_minus_sign: | Structured output class for extracting useful information from each extracted bounding box / image from document. Only json_schema is valid for this field | +| `document_annotation_format` | [OptionalNullable[models.ResponseFormat]](../models/responseformat.md) | :heavy_minus_sign: | Structured output class for extracting useful information from the entire document. 
Only json_schema is valid for this field | \ No newline at end of file diff --git a/packages/mistralai_azure/docs/models/ocrresponse.md b/packages/mistralai_azure/docs/models/ocrresponse.md new file mode 100644 index 00000000..0a309317 --- /dev/null +++ b/packages/mistralai_azure/docs/models/ocrresponse.md @@ -0,0 +1,11 @@ +# OCRResponse + + +## Fields + +| Field | Type | Required | Description | +| ---------------------------------------------------------------- | ---------------------------------------------------------------- | ---------------------------------------------------------------- | ---------------------------------------------------------------- | +| `pages` | List[[models.OCRPageObject](../models/ocrpageobject.md)] | :heavy_check_mark: | List of OCR info for pages. | +| `model` | *str* | :heavy_check_mark: | The model used to generate the OCR. | +| `document_annotation` | *OptionalNullable[str]* | :heavy_minus_sign: | Formatted response in the request_format if provided in json str | +| `usage_info` | [models.OCRUsageInfo](../models/ocrusageinfo.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/packages/mistralai_azure/docs/models/ocrusageinfo.md b/packages/mistralai_azure/docs/models/ocrusageinfo.md new file mode 100644 index 00000000..d9d79125 --- /dev/null +++ b/packages/mistralai_azure/docs/models/ocrusageinfo.md @@ -0,0 +1,9 @@ +# OCRUsageInfo + + +## Fields + +| Field | Type | Required | Description | +| ------------------------- | ------------------------- | ------------------------- | ------------------------- | +| `pages_processed` | *int* | :heavy_check_mark: | Number of pages processed | +| `doc_size_bytes` | *OptionalNullable[int]* | :heavy_minus_sign: | Document size in bytes | \ No newline at end of file diff --git a/packages/mistralai_azure/docs/models/tool.md b/packages/mistralai_azure/docs/models/tool.md index 822f86f8..fb661f72 100644 --- a/packages/mistralai_azure/docs/models/tool.md +++ 
b/packages/mistralai_azure/docs/models/tool.md @@ -5,5 +5,5 @@ | Field | Type | Required | Description | | ---------------------------------------------------- | ---------------------------------------------------- | ---------------------------------------------------- | ---------------------------------------------------- | -| `function` | [models.Function](../models/function.md) | :heavy_check_mark: | N/A | -| `type` | [Optional[models.ToolTypes]](../models/tooltypes.md) | :heavy_minus_sign: | N/A | \ No newline at end of file +| `type` | [Optional[models.ToolTypes]](../models/tooltypes.md) | :heavy_minus_sign: | N/A | +| `function` | [models.Function](../models/function.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/packages/mistralai_azure/docs/models/toolcall.md b/packages/mistralai_azure/docs/models/toolcall.md index 574be1ea..3819236b 100644 --- a/packages/mistralai_azure/docs/models/toolcall.md +++ b/packages/mistralai_azure/docs/models/toolcall.md @@ -5,7 +5,7 @@ | Field | Type | Required | Description | | ---------------------------------------------------- | ---------------------------------------------------- | ---------------------------------------------------- | ---------------------------------------------------- | -| `function` | [models.FunctionCall](../models/functioncall.md) | :heavy_check_mark: | N/A | | `id` | *Optional[str]* | :heavy_minus_sign: | N/A | | `type` | [Optional[models.ToolTypes]](../models/tooltypes.md) | :heavy_minus_sign: | N/A | +| `function` | [models.FunctionCall](../models/functioncall.md) | :heavy_check_mark: | N/A | | `index` | *Optional[int]* | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/packages/mistralai_azure/docs/models/toolchoice.md b/packages/mistralai_azure/docs/models/toolchoice.md index 792ebcd6..373046bb 100644 --- a/packages/mistralai_azure/docs/models/toolchoice.md +++ b/packages/mistralai_azure/docs/models/toolchoice.md @@ -7,5 +7,5 @@ ToolChoice is either a 
ToolChoiceEnum or a ToolChoice | Field | Type | Required | Description | | ---------------------------------------------------------------------------- | ---------------------------------------------------------------------------- | ---------------------------------------------------------------------------- | ---------------------------------------------------------------------------- | -| `function` | [models.FunctionName](../models/functionname.md) | :heavy_check_mark: | this restriction of `Function` is used to select a specific function to call | -| `type` | [Optional[models.ToolTypes]](../models/tooltypes.md) | :heavy_minus_sign: | N/A | \ No newline at end of file +| `type` | [Optional[models.ToolTypes]](../models/tooltypes.md) | :heavy_minus_sign: | N/A | +| `function` | [models.FunctionName](../models/functionname.md) | :heavy_check_mark: | this restriction of `Function` is used to select a specific function to call | \ No newline at end of file diff --git a/packages/mistralai_azure/docs/models/usageinfo.md b/packages/mistralai_azure/docs/models/usageinfo.md index 9f56a3ae..f5204ac9 100644 --- a/packages/mistralai_azure/docs/models/usageinfo.md +++ b/packages/mistralai_azure/docs/models/usageinfo.md @@ -3,8 +3,10 @@ ## Fields -| Field | Type | Required | Description | Example | -| ------------------- | ------------------- | ------------------- | ------------------- | ------------------- | -| `prompt_tokens` | *int* | :heavy_check_mark: | N/A | 16 | -| `completion_tokens` | *int* | :heavy_check_mark: | N/A | 34 | -| `total_tokens` | *int* | :heavy_check_mark: | N/A | 50 | \ No newline at end of file +| Field | Type | Required | Description | +| ----------------------- | ----------------------- | ----------------------- | ----------------------- | +| `prompt_tokens` | *Optional[int]* | :heavy_minus_sign: | N/A | +| `completion_tokens` | *Optional[int]* | :heavy_minus_sign: | N/A | +| `total_tokens` | *Optional[int]* | :heavy_minus_sign: | N/A | +| 
`prompt_audio_seconds` | *OptionalNullable[int]* | :heavy_minus_sign: | N/A | +| `__pydantic_extra__` | Dict[str, *Any*] | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/packages/mistralai_azure/poetry.lock b/packages/mistralai_azure/poetry.lock index 8b70ddcc..fb5b615e 100644 --- a/packages/mistralai_azure/poetry.lock +++ b/packages/mistralai_azure/poetry.lock @@ -91,21 +91,6 @@ files = [ graph = ["objgraph (>=1.7.2)"] profile = ["gprof2dot (>=2022.7.29)"] -[[package]] -name = "eval-type-backport" -version = "0.2.0" -description = "Like `typing._eval_type`, but lets older Python versions use newer typing features." -optional = false -python-versions = ">=3.8" -groups = ["main"] -files = [ - {file = "eval_type_backport-0.2.0-py3-none-any.whl", hash = "sha256:ac2f73d30d40c5a30a80b8739a789d6bb5e49fdffa66d7912667e2015d9c9933"}, - {file = "eval_type_backport-0.2.0.tar.gz", hash = "sha256:68796cfbc7371ebf923f03bdf7bef415f3ec098aeced24e054b253a0e78f7b37"}, -] - -[package.extras] -tests = ["pytest"] - [[package]] name = "exceptiongroup" version = "1.2.2" @@ -124,37 +109,37 @@ test = ["pytest (>=6)"] [[package]] name = "h11" -version = "0.14.0" +version = "0.16.0" description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" groups = ["main"] files = [ - {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, - {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, + {file = "h11-0.16.0-py3-none-any.whl", hash = "sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86"}, + {file = "h11-0.16.0.tar.gz", hash = "sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1"}, ] [[package]] name = "httpcore" -version = "1.0.5" +version = "1.0.9" description = "A minimal low-level HTTP client." 
optional = false python-versions = ">=3.8" groups = ["main"] files = [ - {file = "httpcore-1.0.5-py3-none-any.whl", hash = "sha256:421f18bac248b25d310f3cacd198d55b8e6125c107797b609ff9b7a6ba7991b5"}, - {file = "httpcore-1.0.5.tar.gz", hash = "sha256:34a38e2f9291467ee3b44e89dd52615370e152954ba21721378a87b2960f7a61"}, + {file = "httpcore-1.0.9-py3-none-any.whl", hash = "sha256:2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55"}, + {file = "httpcore-1.0.9.tar.gz", hash = "sha256:6e34463af53fd2ab5d807f399a9b45ea31c3dfa2276f15a2c3f00afff6e176e8"}, ] [package.dependencies] certifi = "*" -h11 = ">=0.13,<0.15" +h11 = ">=0.16" [package.extras] asyncio = ["anyio (>=4.0,<5.0)"] http2 = ["h2 (>=3,<5)"] socks = ["socksio (==1.*)"] -trio = ["trio (>=0.22.0,<0.26.0)"] +trio = ["trio (>=0.22.0,<1.0)"] [[package]] name = "httpx" @@ -234,50 +219,44 @@ files = [ [[package]] name = "mypy" -version = "1.14.1" +version = "1.15.0" description = "Optional static typing for Python" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" groups = ["dev"] files = [ - {file = "mypy-1.14.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:52686e37cf13d559f668aa398dd7ddf1f92c5d613e4f8cb262be2fb4fedb0fcb"}, - {file = "mypy-1.14.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1fb545ca340537d4b45d3eecdb3def05e913299ca72c290326be19b3804b39c0"}, - {file = "mypy-1.14.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:90716d8b2d1f4cd503309788e51366f07c56635a3309b0f6a32547eaaa36a64d"}, - {file = "mypy-1.14.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2ae753f5c9fef278bcf12e1a564351764f2a6da579d4a81347e1d5a15819997b"}, - {file = "mypy-1.14.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:e0fe0f5feaafcb04505bcf439e991c6d8f1bf8b15f12b05feeed96e9e7bf1427"}, - {file = "mypy-1.14.1-cp310-cp310-win_amd64.whl", hash = 
"sha256:7d54bd85b925e501c555a3227f3ec0cfc54ee8b6930bd6141ec872d1c572f81f"}, - {file = "mypy-1.14.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f995e511de847791c3b11ed90084a7a0aafdc074ab88c5a9711622fe4751138c"}, - {file = "mypy-1.14.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d64169ec3b8461311f8ce2fd2eb5d33e2d0f2c7b49116259c51d0d96edee48d1"}, - {file = "mypy-1.14.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ba24549de7b89b6381b91fbc068d798192b1b5201987070319889e93038967a8"}, - {file = "mypy-1.14.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:183cf0a45457d28ff9d758730cd0210419ac27d4d3f285beda038c9083363b1f"}, - {file = "mypy-1.14.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f2a0ecc86378f45347f586e4163d1769dd81c5a223d577fe351f26b179e148b1"}, - {file = "mypy-1.14.1-cp311-cp311-win_amd64.whl", hash = "sha256:ad3301ebebec9e8ee7135d8e3109ca76c23752bac1e717bc84cd3836b4bf3eae"}, - {file = "mypy-1.14.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:30ff5ef8519bbc2e18b3b54521ec319513a26f1bba19a7582e7b1f58a6e69f14"}, - {file = "mypy-1.14.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:cb9f255c18052343c70234907e2e532bc7e55a62565d64536dbc7706a20b78b9"}, - {file = "mypy-1.14.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8b4e3413e0bddea671012b063e27591b953d653209e7a4fa5e48759cda77ca11"}, - {file = "mypy-1.14.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:553c293b1fbdebb6c3c4030589dab9fafb6dfa768995a453d8a5d3b23784af2e"}, - {file = "mypy-1.14.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:fad79bfe3b65fe6a1efaed97b445c3d37f7be9fdc348bdb2d7cac75579607c89"}, - {file = "mypy-1.14.1-cp312-cp312-win_amd64.whl", hash = "sha256:8fa2220e54d2946e94ab6dbb3ba0a992795bd68b16dc852db33028df2b00191b"}, - {file = 
"mypy-1.14.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:92c3ed5afb06c3a8e188cb5da4984cab9ec9a77ba956ee419c68a388b4595255"}, - {file = "mypy-1.14.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:dbec574648b3e25f43d23577309b16534431db4ddc09fda50841f1e34e64ed34"}, - {file = "mypy-1.14.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8c6d94b16d62eb3e947281aa7347d78236688e21081f11de976376cf010eb31a"}, - {file = "mypy-1.14.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d4b19b03fdf54f3c5b2fa474c56b4c13c9dbfb9a2db4370ede7ec11a2c5927d9"}, - {file = "mypy-1.14.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:0c911fde686394753fff899c409fd4e16e9b294c24bfd5e1ea4675deae1ac6fd"}, - {file = "mypy-1.14.1-cp313-cp313-win_amd64.whl", hash = "sha256:8b21525cb51671219f5307be85f7e646a153e5acc656e5cebf64bfa076c50107"}, - {file = "mypy-1.14.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7084fb8f1128c76cd9cf68fe5971b37072598e7c31b2f9f95586b65c741a9d31"}, - {file = "mypy-1.14.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:8f845a00b4f420f693f870eaee5f3e2692fa84cc8514496114649cfa8fd5e2c6"}, - {file = "mypy-1.14.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:44bf464499f0e3a2d14d58b54674dee25c031703b2ffc35064bd0df2e0fac319"}, - {file = "mypy-1.14.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c99f27732c0b7dc847adb21c9d47ce57eb48fa33a17bc6d7d5c5e9f9e7ae5bac"}, - {file = "mypy-1.14.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:bce23c7377b43602baa0bd22ea3265c49b9ff0b76eb315d6c34721af4cdf1d9b"}, - {file = "mypy-1.14.1-cp38-cp38-win_amd64.whl", hash = "sha256:8edc07eeade7ebc771ff9cf6b211b9a7d93687ff892150cb5692e4f4272b0837"}, - {file = "mypy-1.14.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:3888a1816d69f7ab92092f785a462944b3ca16d7c470d564165fe703b0970c35"}, - {file = "mypy-1.14.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:46c756a444117c43ee984bd055db99e498bc613a70bbbc120272bd13ca579fbc"}, - {file = "mypy-1.14.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:27fc248022907e72abfd8e22ab1f10e903915ff69961174784a3900a8cba9ad9"}, - {file = "mypy-1.14.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:499d6a72fb7e5de92218db961f1a66d5f11783f9ae549d214617edab5d4dbdbb"}, - {file = "mypy-1.14.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:57961db9795eb566dc1d1b4e9139ebc4c6b0cb6e7254ecde69d1552bf7613f60"}, - {file = "mypy-1.14.1-cp39-cp39-win_amd64.whl", hash = "sha256:07ba89fdcc9451f2ebb02853deb6aaaa3d2239a236669a63ab3801bbf923ef5c"}, - {file = "mypy-1.14.1-py3-none-any.whl", hash = "sha256:b66a60cc4073aeb8ae00057f9c1f64d49e90f918fbcef9a977eb121da8b8f1d1"}, - {file = "mypy-1.14.1.tar.gz", hash = "sha256:7ec88144fe9b510e8475ec2f5f251992690fcf89ccb4500b214b4226abcd32d6"}, + {file = "mypy-1.15.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:979e4e1a006511dacf628e36fadfecbcc0160a8af6ca7dad2f5025529e082c13"}, + {file = "mypy-1.15.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c4bb0e1bd29f7d34efcccd71cf733580191e9a264a2202b0239da95984c5b559"}, + {file = "mypy-1.15.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:be68172e9fd9ad8fb876c6389f16d1c1b5f100ffa779f77b1fb2176fcc9ab95b"}, + {file = "mypy-1.15.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c7be1e46525adfa0d97681432ee9fcd61a3964c2446795714699a998d193f1a3"}, + {file = "mypy-1.15.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:2e2c2e6d3593f6451b18588848e66260ff62ccca522dd231cd4dd59b0160668b"}, + {file = "mypy-1.15.0-cp310-cp310-win_amd64.whl", hash = 
"sha256:6983aae8b2f653e098edb77f893f7b6aca69f6cffb19b2cc7443f23cce5f4828"}, + {file = "mypy-1.15.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2922d42e16d6de288022e5ca321cd0618b238cfc5570e0263e5ba0a77dbef56f"}, + {file = "mypy-1.15.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2ee2d57e01a7c35de00f4634ba1bbf015185b219e4dc5909e281016df43f5ee5"}, + {file = "mypy-1.15.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:973500e0774b85d9689715feeffcc980193086551110fd678ebe1f4342fb7c5e"}, + {file = "mypy-1.15.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5a95fb17c13e29d2d5195869262f8125dfdb5c134dc8d9a9d0aecf7525b10c2c"}, + {file = "mypy-1.15.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1905f494bfd7d85a23a88c5d97840888a7bd516545fc5aaedff0267e0bb54e2f"}, + {file = "mypy-1.15.0-cp311-cp311-win_amd64.whl", hash = "sha256:c9817fa23833ff189db061e6d2eff49b2f3b6ed9856b4a0a73046e41932d744f"}, + {file = "mypy-1.15.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:aea39e0583d05124836ea645f412e88a5c7d0fd77a6d694b60d9b6b2d9f184fd"}, + {file = "mypy-1.15.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2f2147ab812b75e5b5499b01ade1f4a81489a147c01585cda36019102538615f"}, + {file = "mypy-1.15.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ce436f4c6d218a070048ed6a44c0bbb10cd2cc5e272b29e7845f6a2f57ee4464"}, + {file = "mypy-1.15.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8023ff13985661b50a5928fc7a5ca15f3d1affb41e5f0a9952cb68ef090b31ee"}, + {file = "mypy-1.15.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1124a18bc11a6a62887e3e137f37f53fbae476dc36c185d549d4f837a2a6a14e"}, + {file = "mypy-1.15.0-cp312-cp312-win_amd64.whl", hash = "sha256:171a9ca9a40cd1843abeca0e405bc1940cd9b305eaeea2dda769ba096932bb22"}, + {file = 
"mypy-1.15.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:93faf3fdb04768d44bf28693293f3904bbb555d076b781ad2530214ee53e3445"}, + {file = "mypy-1.15.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:811aeccadfb730024c5d3e326b2fbe9249bb7413553f15499a4050f7c30e801d"}, + {file = "mypy-1.15.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:98b7b9b9aedb65fe628c62a6dc57f6d5088ef2dfca37903a7d9ee374d03acca5"}, + {file = "mypy-1.15.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c43a7682e24b4f576d93072216bf56eeff70d9140241f9edec0c104d0c515036"}, + {file = "mypy-1.15.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:baefc32840a9f00babd83251560e0ae1573e2f9d1b067719479bfb0e987c6357"}, + {file = "mypy-1.15.0-cp313-cp313-win_amd64.whl", hash = "sha256:b9378e2c00146c44793c98b8d5a61039a048e31f429fb0eb546d93f4b000bedf"}, + {file = "mypy-1.15.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e601a7fa172c2131bff456bb3ee08a88360760d0d2f8cbd7a75a65497e2df078"}, + {file = "mypy-1.15.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:712e962a6357634fef20412699a3655c610110e01cdaa6180acec7fc9f8513ba"}, + {file = "mypy-1.15.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f95579473af29ab73a10bada2f9722856792a36ec5af5399b653aa28360290a5"}, + {file = "mypy-1.15.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8f8722560a14cde92fdb1e31597760dc35f9f5524cce17836c0d22841830fd5b"}, + {file = "mypy-1.15.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:1fbb8da62dc352133d7d7ca90ed2fb0e9d42bb1a32724c287d3c76c58cbaa9c2"}, + {file = "mypy-1.15.0-cp39-cp39-win_amd64.whl", hash = "sha256:d10d994b41fb3497719bbf866f227b3489048ea4bbbb5015357db306249f7980"}, + {file = "mypy-1.15.0-py3-none-any.whl", hash = "sha256:5469affef548bd1895d86d3bf10ce2b44e33d86923c29e4d675b3e323437ea3e"}, + 
{file = "mypy-1.15.0.tar.gz", hash = "sha256:404534629d51d3efea5c800ee7c42b72a6554d6c400e6a79eafe15d11341fd43"}, ] [package.dependencies] @@ -351,20 +330,21 @@ testing = ["pytest", "pytest-benchmark"] [[package]] name = "pydantic" -version = "2.10.6" +version = "2.11.7" description = "Data validation using Python type hints" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" groups = ["main"] files = [ - {file = "pydantic-2.10.6-py3-none-any.whl", hash = "sha256:427d664bf0b8a2b34ff5dd0f5a18df00591adcee7198fbd71981054cef37b584"}, - {file = "pydantic-2.10.6.tar.gz", hash = "sha256:ca5daa827cce33de7a42be142548b0096bf05a7e7b365aebfa5f8eeec7128236"}, + {file = "pydantic-2.11.7-py3-none-any.whl", hash = "sha256:dde5df002701f6de26248661f6835bbe296a47bf73990135c7d07ce741b9623b"}, + {file = "pydantic-2.11.7.tar.gz", hash = "sha256:d989c3c6cb79469287b1569f7447a17848c998458d49ebe294e975b9baf0f0db"}, ] [package.dependencies] annotated-types = ">=0.6.0" -pydantic-core = "2.27.2" +pydantic-core = "2.33.2" typing-extensions = ">=4.12.2" +typing-inspection = ">=0.4.0" [package.extras] email = ["email-validator (>=2.0.0)"] @@ -372,112 +352,111 @@ timezone = ["tzdata ; python_version >= \"3.9\" and platform_system == \"Windows [[package]] name = "pydantic-core" -version = "2.27.2" +version = "2.33.2" description = "Core functionality for Pydantic validation and serialization" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" groups = ["main"] files = [ - {file = "pydantic_core-2.27.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2d367ca20b2f14095a8f4fa1210f5a7b78b8a20009ecced6b12818f455b1e9fa"}, - {file = "pydantic_core-2.27.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:491a2b73db93fab69731eaee494f320faa4e093dbed776be1a829c2eb222c34c"}, - {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7969e133a6f183be60e9f6f56bfae753585680f3b7307a8e555a948d443cc05a"}, - {file = 
"pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3de9961f2a346257caf0aa508a4da705467f53778e9ef6fe744c038119737ef5"}, - {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e2bb4d3e5873c37bb3dd58714d4cd0b0e6238cebc4177ac8fe878f8b3aa8e74c"}, - {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:280d219beebb0752699480fe8f1dc61ab6615c2046d76b7ab7ee38858de0a4e7"}, - {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47956ae78b6422cbd46f772f1746799cbb862de838fd8d1fbd34a82e05b0983a"}, - {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:14d4a5c49d2f009d62a2a7140d3064f686d17a5d1a268bc641954ba181880236"}, - {file = "pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:337b443af21d488716f8d0b6164de833e788aa6bd7e3a39c005febc1284f4962"}, - {file = "pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:03d0f86ea3184a12f41a2d23f7ccb79cdb5a18e06993f8a45baa8dfec746f0e9"}, - {file = "pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7041c36f5680c6e0f08d922aed302e98b3745d97fe1589db0a3eebf6624523af"}, - {file = "pydantic_core-2.27.2-cp310-cp310-win32.whl", hash = "sha256:50a68f3e3819077be2c98110c1f9dcb3817e93f267ba80a2c05bb4f8799e2ff4"}, - {file = "pydantic_core-2.27.2-cp310-cp310-win_amd64.whl", hash = "sha256:e0fd26b16394ead34a424eecf8a31a1f5137094cabe84a1bcb10fa6ba39d3d31"}, - {file = "pydantic_core-2.27.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:8e10c99ef58cfdf2a66fc15d66b16c4a04f62bca39db589ae8cba08bc55331bc"}, - {file = "pydantic_core-2.27.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:26f32e0adf166a84d0cb63be85c562ca8a6fa8de28e5f0d92250c6b7e9e2aff7"}, - {file = 
"pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c19d1ea0673cd13cc2f872f6c9ab42acc4e4f492a7ca9d3795ce2b112dd7e15"}, - {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5e68c4446fe0810e959cdff46ab0a41ce2f2c86d227d96dc3847af0ba7def306"}, - {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d9640b0059ff4f14d1f37321b94061c6db164fbe49b334b31643e0528d100d99"}, - {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:40d02e7d45c9f8af700f3452f329ead92da4c5f4317ca9b896de7ce7199ea459"}, - {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1c1fd185014191700554795c99b347d64f2bb637966c4cfc16998a0ca700d048"}, - {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d81d2068e1c1228a565af076598f9e7451712700b673de8f502f0334f281387d"}, - {file = "pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:1a4207639fb02ec2dbb76227d7c751a20b1a6b4bc52850568e52260cae64ca3b"}, - {file = "pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:3de3ce3c9ddc8bbd88f6e0e304dea0e66d843ec9de1b0042b0911c1663ffd474"}, - {file = "pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:30c5f68ded0c36466acede341551106821043e9afaad516adfb6e8fa80a4e6a6"}, - {file = "pydantic_core-2.27.2-cp311-cp311-win32.whl", hash = "sha256:c70c26d2c99f78b125a3459f8afe1aed4d9687c24fd677c6a4436bc042e50d6c"}, - {file = "pydantic_core-2.27.2-cp311-cp311-win_amd64.whl", hash = "sha256:08e125dbdc505fa69ca7d9c499639ab6407cfa909214d500897d02afb816e7cc"}, - {file = "pydantic_core-2.27.2-cp311-cp311-win_arm64.whl", hash = "sha256:26f0d68d4b235a2bae0c3fc585c585b4ecc51382db0e3ba402a22cbc440915e4"}, - {file = "pydantic_core-2.27.2-cp312-cp312-macosx_10_12_x86_64.whl", 
hash = "sha256:9e0c8cfefa0ef83b4da9588448b6d8d2a2bf1a53c3f1ae5fca39eb3061e2f0b0"}, - {file = "pydantic_core-2.27.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:83097677b8e3bd7eaa6775720ec8e0405f1575015a463285a92bfdfe254529ef"}, - {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:172fce187655fece0c90d90a678424b013f8fbb0ca8b036ac266749c09438cb7"}, - {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:519f29f5213271eeeeb3093f662ba2fd512b91c5f188f3bb7b27bc5973816934"}, - {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:05e3a55d124407fffba0dd6b0c0cd056d10e983ceb4e5dbd10dda135c31071d6"}, - {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9c3ed807c7b91de05e63930188f19e921d1fe90de6b4f5cd43ee7fcc3525cb8c"}, - {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6fb4aadc0b9a0c063206846d603b92030eb6f03069151a625667f982887153e2"}, - {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:28ccb213807e037460326424ceb8b5245acb88f32f3d2777427476e1b32c48c4"}, - {file = "pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:de3cd1899e2c279b140adde9357c4495ed9d47131b4a4eaff9052f23398076b3"}, - {file = "pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:220f892729375e2d736b97d0e51466252ad84c51857d4d15f5e9692f9ef12be4"}, - {file = "pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a0fcd29cd6b4e74fe8ddd2c90330fd8edf2e30cb52acda47f06dd615ae72da57"}, - {file = "pydantic_core-2.27.2-cp312-cp312-win32.whl", hash = "sha256:1e2cb691ed9834cd6a8be61228471d0a503731abfb42f82458ff27be7b2186fc"}, - {file = "pydantic_core-2.27.2-cp312-cp312-win_amd64.whl", hash = 
"sha256:cc3f1a99a4f4f9dd1de4fe0312c114e740b5ddead65bb4102884b384c15d8bc9"}, - {file = "pydantic_core-2.27.2-cp312-cp312-win_arm64.whl", hash = "sha256:3911ac9284cd8a1792d3cb26a2da18f3ca26c6908cc434a18f730dc0db7bfa3b"}, - {file = "pydantic_core-2.27.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:7d14bd329640e63852364c306f4d23eb744e0f8193148d4044dd3dacdaacbd8b"}, - {file = "pydantic_core-2.27.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:82f91663004eb8ed30ff478d77c4d1179b3563df6cdb15c0817cd1cdaf34d154"}, - {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:71b24c7d61131bb83df10cc7e687433609963a944ccf45190cfc21e0887b08c9"}, - {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fa8e459d4954f608fa26116118bb67f56b93b209c39b008277ace29937453dc9"}, - {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce8918cbebc8da707ba805b7fd0b382816858728ae7fe19a942080c24e5b7cd1"}, - {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eda3f5c2a021bbc5d976107bb302e0131351c2ba54343f8a496dc8783d3d3a6a"}, - {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bd8086fa684c4775c27f03f062cbb9eaa6e17f064307e86b21b9e0abc9c0f02e"}, - {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8d9b3388db186ba0c099a6d20f0604a44eabdeef1777ddd94786cdae158729e4"}, - {file = "pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:7a66efda2387de898c8f38c0cf7f14fca0b51a8ef0b24bfea5849f1b3c95af27"}, - {file = "pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:18a101c168e4e092ab40dbc2503bdc0f62010e95d292b27827871dc85450d7ee"}, - {file = "pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = 
"sha256:ba5dd002f88b78a4215ed2f8ddbdf85e8513382820ba15ad5ad8955ce0ca19a1"}, - {file = "pydantic_core-2.27.2-cp313-cp313-win32.whl", hash = "sha256:1ebaf1d0481914d004a573394f4be3a7616334be70261007e47c2a6fe7e50130"}, - {file = "pydantic_core-2.27.2-cp313-cp313-win_amd64.whl", hash = "sha256:953101387ecf2f5652883208769a79e48db18c6df442568a0b5ccd8c2723abee"}, - {file = "pydantic_core-2.27.2-cp313-cp313-win_arm64.whl", hash = "sha256:ac4dbfd1691affb8f48c2c13241a2e3b60ff23247cbcf981759c768b6633cf8b"}, - {file = "pydantic_core-2.27.2-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:d3e8d504bdd3f10835468f29008d72fc8359d95c9c415ce6e767203db6127506"}, - {file = "pydantic_core-2.27.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:521eb9b7f036c9b6187f0b47318ab0d7ca14bd87f776240b90b21c1f4f149320"}, - {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:85210c4d99a0114f5a9481b44560d7d1e35e32cc5634c656bc48e590b669b145"}, - {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d716e2e30c6f140d7560ef1538953a5cd1a87264c737643d481f2779fc247fe1"}, - {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f66d89ba397d92f840f8654756196d93804278457b5fbede59598a1f9f90b228"}, - {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:669e193c1c576a58f132e3158f9dfa9662969edb1a250c54d8fa52590045f046"}, - {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdbe7629b996647b99c01b37f11170a57ae675375b14b8c13b8518b8320ced5"}, - {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d262606bf386a5ba0b0af3b97f37c83d7011439e3dc1a9298f21efb292e42f1a"}, - {file = "pydantic_core-2.27.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:cabb9bcb7e0d97f74df8646f34fc76fbf793b7f6dc2438517d7a9e50eee4f14d"}, - {file 
= "pydantic_core-2.27.2-cp38-cp38-musllinux_1_1_armv7l.whl", hash = "sha256:d2d63f1215638d28221f664596b1ccb3944f6e25dd18cd3b86b0a4c408d5ebb9"}, - {file = "pydantic_core-2.27.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:bca101c00bff0adb45a833f8451b9105d9df18accb8743b08107d7ada14bd7da"}, - {file = "pydantic_core-2.27.2-cp38-cp38-win32.whl", hash = "sha256:f6f8e111843bbb0dee4cb6594cdc73e79b3329b526037ec242a3e49012495b3b"}, - {file = "pydantic_core-2.27.2-cp38-cp38-win_amd64.whl", hash = "sha256:fd1aea04935a508f62e0d0ef1f5ae968774a32afc306fb8545e06f5ff5cdf3ad"}, - {file = "pydantic_core-2.27.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:c10eb4f1659290b523af58fa7cffb452a61ad6ae5613404519aee4bfbf1df993"}, - {file = "pydantic_core-2.27.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ef592d4bad47296fb11f96cd7dc898b92e795032b4894dfb4076cfccd43a9308"}, - {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c61709a844acc6bf0b7dce7daae75195a10aac96a596ea1b776996414791ede4"}, - {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:42c5f762659e47fdb7b16956c71598292f60a03aa92f8b6351504359dbdba6cf"}, - {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4c9775e339e42e79ec99c441d9730fccf07414af63eac2f0e48e08fd38a64d76"}, - {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:57762139821c31847cfb2df63c12f725788bd9f04bc2fb392790959b8f70f118"}, - {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0d1e85068e818c73e048fe28cfc769040bb1f475524f4745a5dc621f75ac7630"}, - {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:097830ed52fd9e427942ff3b9bc17fab52913b2f50f2880dc4a5611446606a54"}, - {file = "pydantic_core-2.27.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = 
"sha256:044a50963a614ecfae59bb1eaf7ea7efc4bc62f49ed594e18fa1e5d953c40e9f"}, - {file = "pydantic_core-2.27.2-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:4e0b4220ba5b40d727c7f879eac379b822eee5d8fff418e9d3381ee45b3b0362"}, - {file = "pydantic_core-2.27.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5e4f4bb20d75e9325cc9696c6802657b58bc1dbbe3022f32cc2b2b632c3fbb96"}, - {file = "pydantic_core-2.27.2-cp39-cp39-win32.whl", hash = "sha256:cca63613e90d001b9f2f9a9ceb276c308bfa2a43fafb75c8031c4f66039e8c6e"}, - {file = "pydantic_core-2.27.2-cp39-cp39-win_amd64.whl", hash = "sha256:77d1bca19b0f7021b3a982e6f903dcd5b2b06076def36a652e3907f596e29f67"}, - {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:2bf14caea37e91198329b828eae1618c068dfb8ef17bb33287a7ad4b61ac314e"}, - {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:b0cb791f5b45307caae8810c2023a184c74605ec3bcbb67d13846c28ff731ff8"}, - {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:688d3fd9fcb71f41c4c015c023d12a79d1c4c0732ec9eb35d96e3388a120dcf3"}, - {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d591580c34f4d731592f0e9fe40f9cc1b430d297eecc70b962e93c5c668f15f"}, - {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:82f986faf4e644ffc189a7f1aafc86e46ef70372bb153e7001e8afccc6e54133"}, - {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:bec317a27290e2537f922639cafd54990551725fc844249e64c523301d0822fc"}, - {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:0296abcb83a797db256b773f45773da397da75a08f5fcaef41f2044adec05f50"}, - {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = 
"sha256:0d75070718e369e452075a6017fbf187f788e17ed67a3abd47fa934d001863d9"}, - {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:7e17b560be3c98a8e3aa66ce828bdebb9e9ac6ad5466fba92eb74c4c95cb1151"}, - {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:c33939a82924da9ed65dab5a65d427205a73181d8098e79b6b426bdf8ad4e656"}, - {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:00bad2484fa6bda1e216e7345a798bd37c68fb2d97558edd584942aa41b7d278"}, - {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c817e2b40aba42bac6f457498dacabc568c3b7a986fc9ba7c8d9d260b71485fb"}, - {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:251136cdad0cb722e93732cb45ca5299fb56e1344a833640bf93b2803f8d1bfd"}, - {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d2088237af596f0a524d3afc39ab3b036e8adb054ee57cbb1dcf8e09da5b29cc"}, - {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:d4041c0b966a84b4ae7a09832eb691a35aec90910cd2dbe7a208de59be77965b"}, - {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:8083d4e875ebe0b864ffef72a4304827015cff328a1be6e22cc850753bfb122b"}, - {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f141ee28a0ad2123b6611b6ceff018039df17f32ada8b534e6aa039545a3efb2"}, - {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7d0c8399fcc1848491f00e0314bd59fb34a9c008761bcb422a057670c3f65e35"}, - {file = "pydantic_core-2.27.2.tar.gz", hash = "sha256:eb026e5a4c1fee05726072337ff51d1efb6f59090b7da90d30ea58625b1ffb39"}, + {file = "pydantic_core-2.33.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2b3d326aaef0c0399d9afffeb6367d5e26ddc24d351dbc9c636840ac355dc5d8"}, + {file = 
"pydantic_core-2.33.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0e5b2671f05ba48b94cb90ce55d8bdcaaedb8ba00cc5359f6810fc918713983d"}, + {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0069c9acc3f3981b9ff4cdfaf088e98d83440a4c7ea1bc07460af3d4dc22e72d"}, + {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d53b22f2032c42eaaf025f7c40c2e3b94568ae077a606f006d206a463bc69572"}, + {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0405262705a123b7ce9f0b92f123334d67b70fd1f20a9372b907ce1080c7ba02"}, + {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4b25d91e288e2c4e0662b8038a28c6a07eaac3e196cfc4ff69de4ea3db992a1b"}, + {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6bdfe4b3789761f3bcb4b1ddf33355a71079858958e3a552f16d5af19768fef2"}, + {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:efec8db3266b76ef9607c2c4c419bdb06bf335ae433b80816089ea7585816f6a"}, + {file = "pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:031c57d67ca86902726e0fae2214ce6770bbe2f710dc33063187a68744a5ecac"}, + {file = "pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:f8de619080e944347f5f20de29a975c2d815d9ddd8be9b9b7268e2e3ef68605a"}, + {file = "pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:73662edf539e72a9440129f231ed3757faab89630d291b784ca99237fb94db2b"}, + {file = "pydantic_core-2.33.2-cp310-cp310-win32.whl", hash = "sha256:0a39979dcbb70998b0e505fb1556a1d550a0781463ce84ebf915ba293ccb7e22"}, + {file = "pydantic_core-2.33.2-cp310-cp310-win_amd64.whl", hash = "sha256:b0379a2b24882fef529ec3b4987cb5d003b9cda32256024e6fe1586ac45fc640"}, + {file = 
"pydantic_core-2.33.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:4c5b0a576fb381edd6d27f0a85915c6daf2f8138dc5c267a57c08a62900758c7"}, + {file = "pydantic_core-2.33.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e799c050df38a639db758c617ec771fd8fb7a5f8eaaa4b27b101f266b216a246"}, + {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dc46a01bf8d62f227d5ecee74178ffc448ff4e5197c756331f71efcc66dc980f"}, + {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a144d4f717285c6d9234a66778059f33a89096dfb9b39117663fd8413d582dcc"}, + {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:73cf6373c21bc80b2e0dc88444f41ae60b2f070ed02095754eb5a01df12256de"}, + {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3dc625f4aa79713512d1976fe9f0bc99f706a9dee21dfd1810b4bbbf228d0e8a"}, + {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:881b21b5549499972441da4758d662aeea93f1923f953e9cbaff14b8b9565aef"}, + {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bdc25f3681f7b78572699569514036afe3c243bc3059d3942624e936ec93450e"}, + {file = "pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:fe5b32187cbc0c862ee201ad66c30cf218e5ed468ec8dc1cf49dec66e160cc4d"}, + {file = "pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:bc7aee6f634a6f4a95676fcb5d6559a2c2a390330098dba5e5a5f28a2e4ada30"}, + {file = "pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:235f45e5dbcccf6bd99f9f472858849f73d11120d76ea8707115415f8e5ebebf"}, + {file = "pydantic_core-2.33.2-cp311-cp311-win32.whl", hash = "sha256:6368900c2d3ef09b69cb0b913f9f8263b03786e5b2a387706c5afb66800efd51"}, + {file = 
"pydantic_core-2.33.2-cp311-cp311-win_amd64.whl", hash = "sha256:1e063337ef9e9820c77acc768546325ebe04ee38b08703244c1309cccc4f1bab"}, + {file = "pydantic_core-2.33.2-cp311-cp311-win_arm64.whl", hash = "sha256:6b99022f1d19bc32a4c2a0d544fc9a76e3be90f0b3f4af413f87d38749300e65"}, + {file = "pydantic_core-2.33.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:a7ec89dc587667f22b6a0b6579c249fca9026ce7c333fc142ba42411fa243cdc"}, + {file = "pydantic_core-2.33.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3c6db6e52c6d70aa0d00d45cdb9b40f0433b96380071ea80b09277dba021ddf7"}, + {file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e61206137cbc65e6d5256e1166f88331d3b6238e082d9f74613b9b765fb9025"}, + {file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eb8c529b2819c37140eb51b914153063d27ed88e3bdc31b71198a198e921e011"}, + {file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c52b02ad8b4e2cf14ca7b3d918f3eb0ee91e63b3167c32591e57c4317e134f8f"}, + {file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:96081f1605125ba0855dfda83f6f3df5ec90c61195421ba72223de35ccfb2f88"}, + {file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f57a69461af2a5fa6e6bbd7a5f60d3b7e6cebb687f55106933188e79ad155c1"}, + {file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:572c7e6c8bb4774d2ac88929e3d1f12bc45714ae5ee6d9a788a9fb35e60bb04b"}, + {file = "pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:db4b41f9bd95fbe5acd76d89920336ba96f03e149097365afe1cb092fceb89a1"}, + {file = "pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:fa854f5cf7e33842a892e5c73f45327760bc7bc516339fda888c75ae60edaeb6"}, + {file = 
"pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5f483cfb75ff703095c59e365360cb73e00185e01aaea067cd19acffd2ab20ea"}, + {file = "pydantic_core-2.33.2-cp312-cp312-win32.whl", hash = "sha256:9cb1da0f5a471435a7bc7e439b8a728e8b61e59784b2af70d7c169f8dd8ae290"}, + {file = "pydantic_core-2.33.2-cp312-cp312-win_amd64.whl", hash = "sha256:f941635f2a3d96b2973e867144fde513665c87f13fe0e193c158ac51bfaaa7b2"}, + {file = "pydantic_core-2.33.2-cp312-cp312-win_arm64.whl", hash = "sha256:cca3868ddfaccfbc4bfb1d608e2ccaaebe0ae628e1416aeb9c4d88c001bb45ab"}, + {file = "pydantic_core-2.33.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:1082dd3e2d7109ad8b7da48e1d4710c8d06c253cbc4a27c1cff4fbcaa97a9e3f"}, + {file = "pydantic_core-2.33.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f517ca031dfc037a9c07e748cefd8d96235088b83b4f4ba8939105d20fa1dcd6"}, + {file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a9f2c9dd19656823cb8250b0724ee9c60a82f3cdf68a080979d13092a3b0fef"}, + {file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2b0a451c263b01acebe51895bfb0e1cc842a5c666efe06cdf13846c7418caa9a"}, + {file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ea40a64d23faa25e62a70ad163571c0b342b8bf66d5fa612ac0dec4f069d916"}, + {file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fb2d542b4d66f9470e8065c5469ec676978d625a8b7a363f07d9a501a9cb36a"}, + {file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdac5d6ffa1b5a83bca06ffe7583f5576555e6c8b3a91fbd25ea7780f825f7d"}, + {file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:04a1a413977ab517154eebb2d326da71638271477d6ad87a769102f7c2488c56"}, + {file = "pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash 
= "sha256:c8e7af2f4e0194c22b5b37205bfb293d166a7344a5b0d0eaccebc376546d77d5"}, + {file = "pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:5c92edd15cd58b3c2d34873597a1e20f13094f59cf88068adb18947df5455b4e"}, + {file = "pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:65132b7b4a1c0beded5e057324b7e16e10910c106d43675d9bd87d4f38dde162"}, + {file = "pydantic_core-2.33.2-cp313-cp313-win32.whl", hash = "sha256:52fb90784e0a242bb96ec53f42196a17278855b0f31ac7c3cc6f5c1ec4811849"}, + {file = "pydantic_core-2.33.2-cp313-cp313-win_amd64.whl", hash = "sha256:c083a3bdd5a93dfe480f1125926afcdbf2917ae714bdb80b36d34318b2bec5d9"}, + {file = "pydantic_core-2.33.2-cp313-cp313-win_arm64.whl", hash = "sha256:e80b087132752f6b3d714f041ccf74403799d3b23a72722ea2e6ba2e892555b9"}, + {file = "pydantic_core-2.33.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:61c18fba8e5e9db3ab908620af374db0ac1baa69f0f32df4f61ae23f15e586ac"}, + {file = "pydantic_core-2.33.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95237e53bb015f67b63c91af7518a62a8660376a6a0db19b89acc77a4d6199f5"}, + {file = "pydantic_core-2.33.2-cp313-cp313t-win_amd64.whl", hash = "sha256:c2fc0a768ef76c15ab9238afa6da7f69895bb5d1ee83aeea2e3509af4472d0b9"}, + {file = "pydantic_core-2.33.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:a2b911a5b90e0374d03813674bf0a5fbbb7741570dcd4b4e85a2e48d17def29d"}, + {file = "pydantic_core-2.33.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6fa6dfc3e4d1f734a34710f391ae822e0a8eb8559a85c6979e14e65ee6ba2954"}, + {file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c54c939ee22dc8e2d545da79fc5381f1c020d6d3141d3bd747eab59164dc89fb"}, + {file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:53a57d2ed685940a504248187d5685e49eb5eef0f696853647bf37c418c538f7"}, + {file = 
"pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:09fb9dd6571aacd023fe6aaca316bd01cf60ab27240d7eb39ebd66a3a15293b4"}, + {file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0e6116757f7959a712db11f3e9c0a99ade00a5bbedae83cb801985aa154f071b"}, + {file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d55ab81c57b8ff8548c3e4947f119551253f4e3787a7bbc0b6b3ca47498a9d3"}, + {file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c20c462aa4434b33a2661701b861604913f912254e441ab8d78d30485736115a"}, + {file = "pydantic_core-2.33.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:44857c3227d3fb5e753d5fe4a3420d6376fa594b07b621e220cd93703fe21782"}, + {file = "pydantic_core-2.33.2-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:eb9b459ca4df0e5c87deb59d37377461a538852765293f9e6ee834f0435a93b9"}, + {file = "pydantic_core-2.33.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9fcd347d2cc5c23b06de6d3b7b8275be558a0c90549495c699e379a80bf8379e"}, + {file = "pydantic_core-2.33.2-cp39-cp39-win32.whl", hash = "sha256:83aa99b1285bc8f038941ddf598501a86f1536789740991d7d8756e34f1e74d9"}, + {file = "pydantic_core-2.33.2-cp39-cp39-win_amd64.whl", hash = "sha256:f481959862f57f29601ccced557cc2e817bce7533ab8e01a797a48b49c9692b3"}, + {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5c4aa4e82353f65e548c476b37e64189783aa5384903bfea4f41580f255fddfa"}, + {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d946c8bf0d5c24bf4fe333af284c59a19358aa3ec18cb3dc4370080da1e8ad29"}, + {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:87b31b6846e361ef83fedb187bb5b4372d0da3f7e28d85415efa92d6125d6e6d"}, + {file = 
"pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa9d91b338f2df0508606f7009fde642391425189bba6d8c653afd80fd6bb64e"}, + {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2058a32994f1fde4ca0480ab9d1e75a0e8c87c22b53a3ae66554f9af78f2fe8c"}, + {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:0e03262ab796d986f978f79c943fc5f620381be7287148b8010b4097f79a39ec"}, + {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:1a8695a8d00c73e50bff9dfda4d540b7dee29ff9b8053e38380426a85ef10052"}, + {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:fa754d1850735a0b0e03bcffd9d4b4343eb417e47196e4485d9cca326073a42c"}, + {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:a11c8d26a50bfab49002947d3d237abe4d9e4b5bdc8846a63537b6488e197808"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:dd14041875d09cc0f9308e37a6f8b65f5585cf2598a53aa0123df8b129d481f8"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:d87c561733f66531dced0da6e864f44ebf89a8fba55f31407b00c2f7f9449593"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f82865531efd18d6e07a04a17331af02cb7a651583c418df8266f17a63c6612"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bfb5112df54209d820d7bf9317c7a6c9025ea52e49f46b6a2060104bba37de7"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:64632ff9d614e5eecfb495796ad51b0ed98c453e447a76bcbeeb69615079fc7e"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = 
"sha256:f889f7a40498cc077332c7ab6b4608d296d852182211787d4f3ee377aaae66e8"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:de4b83bb311557e439b9e186f733f6c645b9417c84e2eb8203f3f820a4b988bf"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:82f68293f055f51b51ea42fafc74b6aad03e70e191799430b90c13d643059ebb"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:329467cecfb529c925cf2bbd4d60d2c509bc2fb52a20c1045bf09bb70971a9c1"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:87acbfcf8e90ca885206e98359d7dca4bcbb35abdc0ff66672a293e1d7a19101"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:7f92c15cd1e97d4b12acd1cc9004fa092578acfa57b67ad5e43a197175d01a64"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d3f26877a748dc4251cfcfda9dfb5f13fcb034f5308388066bcfe9031b63ae7d"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dac89aea9af8cd672fa7b510e7b8c33b0bba9a43186680550ccf23020f32d535"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:970919794d126ba8645f3837ab6046fb4e72bbc057b3709144066204c19a455d"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:3eb3fe62804e8f859c49ed20a8451342de53ed764150cb14ca71357c765dc2a6"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:3abcd9392a36025e3bd55f9bd38d908bd17962cc49bc6da8e7e96285336e2bca"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:3a1c81334778f9e3af2f8aeb7a960736e5cab1dfebfb26aabca09afd2906c039"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-win_amd64.whl", hash = 
"sha256:2807668ba86cb38c6817ad9bc66215ab8584d1d304030ce4f0887336f28a5e27"}, + {file = "pydantic_core-2.33.2.tar.gz", hash = "sha256:7cb8bc3605c29176e1b105350d2e6474142d7c1bd1d9327c4a9bdb46bf827acc"}, ] [package.dependencies] @@ -556,33 +535,6 @@ pytest = ">=7.0.0,<9" docs = ["sphinx (>=5.3)", "sphinx-rtd-theme (>=1.0)"] testing = ["coverage (>=6.2)", "hypothesis (>=5.7.1)"] -[[package]] -name = "python-dateutil" -version = "2.8.2" -description = "Extensions to the standard Python datetime module" -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" -groups = ["main"] -files = [ - {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, - {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, -] - -[package.dependencies] -six = ">=1.5" - -[[package]] -name = "six" -version = "1.16.0" -description = "Python 2 and 3 compatibility utilities" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" -groups = ["main"] -files = [ - {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, - {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, -] - [[package]] name = "sniffio" version = "1.3.1" @@ -620,18 +572,6 @@ files = [ {file = "tomlkit-0.13.0.tar.gz", hash = "sha256:08ad192699734149f5b97b45f1f18dad7eb1b6d16bc72ad0c2335772650d7b72"}, ] -[[package]] -name = "types-python-dateutil" -version = "2.9.0.20240316" -description = "Typing stubs for python-dateutil" -optional = false -python-versions = ">=3.8" -groups = ["dev"] -files = [ - {file = "types-python-dateutil-2.9.0.20240316.tar.gz", hash = "sha256:5d2f2e240b86905e40944dd787db6da9263f0deabef1076ddaed797351ec0202"}, - {file = "types_python_dateutil-2.9.0.20240316-py3-none-any.whl", hash = 
"sha256:6b8cb66d960771ce5ff974e9dd45e38facb81718cc1e208b10b1baccbfdbee3b"}, -] - [[package]] name = "typing-extensions" version = "4.12.2" @@ -661,5 +601,5 @@ typing-extensions = ">=4.12.0" [metadata] lock-version = "2.1" -python-versions = ">=3.9" -content-hash = "16a8e7bb56287babdd384870773880315911c8f1851d21314cf11ca92104c600" +python-versions = ">=3.9.2" +content-hash = "7783570c2127219ca33415a933b1f4b8e43c4b432d4f04679552c25f89596fc1" diff --git a/packages/mistralai_azure/pylintrc b/packages/mistralai_azure/pylintrc index 266bc815..a8fcb932 100644 --- a/packages/mistralai_azure/pylintrc +++ b/packages/mistralai_azure/pylintrc @@ -188,8 +188,8 @@ good-names=i, Run, _, e, - n, - id + id, + n # Good variable names regexes, separated by a comma. If names match any regex, # they will always be accepted @@ -458,7 +458,8 @@ disable=raw-checker-failed, relative-beyond-top-level, consider-using-with, wildcard-import, - unused-wildcard-import + unused-wildcard-import, + too-many-return-statements # Enable the message, report, category or checker with the given id(s). You can # either give multiple identifier separated by comma (,) or put this option @@ -659,4 +660,4 @@ init-import=no # List of qualified module names which can have objects that can redefine # builtins. -redefining-builtins-modules=six.moves,past.builtins,future.builtins,builtins,io +redefining-builtins-modules=six.moves,past.builtins,future.builtins,builtins,io \ No newline at end of file diff --git a/packages/mistralai_azure/pyproject.toml b/packages/mistralai_azure/pyproject.toml index cca906a7..81387b21 100644 --- a/packages/mistralai_azure/pyproject.toml +++ b/packages/mistralai_azure/pyproject.toml @@ -3,14 +3,12 @@ name = "mistralai_azure" version = "1.6.0" description = "Python Client SDK for the Mistral AI API in Azure." 
authors = [{ name = "Mistral" },] -readme = "README-PYPI.md" -requires-python = ">=3.9" +readme = "README.md" +requires-python = ">=3.9.2" dependencies = [ - "eval-type-backport >=0.2.0", + "httpcore >=1.0.9", "httpx >=0.28.1", - "pydantic >=2.10.3", - "python-dateutil >=2.8.2", - "typing-inspection >=0.4.0", + "pydantic >=2.11.2", ] [tool.poetry] @@ -26,11 +24,10 @@ include = ["py.typed", "src/mistralai_azure/py.typed"] in-project = true [tool.poetry.group.dev.dependencies] -mypy = "==1.14.1" +mypy = "==1.15.0" pylint = "==3.2.3" pytest = "^8.2.2" pytest-asyncio = "^0.23.7" -types-python-dateutil = "^2.9.0.20240316" [build-system] requires = ["poetry-core"] @@ -42,6 +39,8 @@ pythonpath = ["src"] [tool.mypy] disable_error_code = "misc" +explicit_package_bases = true +mypy_path = "src" [[tool.mypy.overrides]] module = "typing_inspect" diff --git a/packages/mistralai_azure/scripts/prepare_readme.py b/packages/mistralai_azure/scripts/prepare_readme.py deleted file mode 100644 index 825d9ded..00000000 --- a/packages/mistralai_azure/scripts/prepare_readme.py +++ /dev/null @@ -1,9 +0,0 @@ -"""Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" - -import shutil - -try: - shutil.copyfile("README.md", "README-PYPI.md") -except Exception as e: - print("Failed to copy README.md to README-PYPI.md") - print(e) diff --git a/packages/mistralai_azure/scripts/publish.sh b/packages/mistralai_azure/scripts/publish.sh index f2f2cf2c..1ee7194c 100755 --- a/packages/mistralai_azure/scripts/publish.sh +++ b/packages/mistralai_azure/scripts/publish.sh @@ -2,6 +2,4 @@ export POETRY_PYPI_TOKEN_PYPI=${PYPI_TOKEN} -poetry run python scripts/prepare_readme.py - poetry publish --build --skip-existing diff --git a/packages/mistralai_azure/src/mistralai_azure/_hooks/types.py b/packages/mistralai_azure/src/mistralai_azure/_hooks/types.py index 297dfa2f..0c22d7eb 100644 --- a/packages/mistralai_azure/src/mistralai_azure/_hooks/types.py +++ b/packages/mistralai_azure/src/mistralai_azure/_hooks/types.py @@ -3,10 +3,12 @@ from abc import ABC, abstractmethod import httpx from mistralai_azure.httpclient import HttpClient +from mistralai_azure.sdkconfiguration import SDKConfiguration from typing import Any, Callable, List, Optional, Tuple, Union class HookContext: + config: SDKConfiguration base_url: str operation_id: str oauth2_scopes: Optional[List[str]] = None @@ -14,11 +16,13 @@ class HookContext: def __init__( self, + config: SDKConfiguration, base_url: str, operation_id: str, oauth2_scopes: Optional[List[str]], security_source: Optional[Union[Any, Callable[[], Any]]], ): + self.config = config self.base_url = base_url self.operation_id = operation_id self.oauth2_scopes = oauth2_scopes @@ -28,6 +32,7 @@ def __init__( class BeforeRequestContext(HookContext): def __init__(self, hook_ctx: HookContext): super().__init__( + hook_ctx.config, hook_ctx.base_url, hook_ctx.operation_id, hook_ctx.oauth2_scopes, @@ -38,6 +43,7 @@ def __init__(self, hook_ctx: HookContext): class AfterSuccessContext(HookContext): def __init__(self, hook_ctx: HookContext): super().__init__( + hook_ctx.config, hook_ctx.base_url, 
hook_ctx.operation_id, hook_ctx.oauth2_scopes, @@ -48,6 +54,7 @@ def __init__(self, hook_ctx: HookContext): class AfterErrorContext(HookContext): def __init__(self, hook_ctx: HookContext): super().__init__( + hook_ctx.config, hook_ctx.base_url, hook_ctx.operation_id, hook_ctx.oauth2_scopes, diff --git a/packages/mistralai_azure/src/mistralai_azure/_version.py b/packages/mistralai_azure/src/mistralai_azure/_version.py index 65696610..5fd03467 100644 --- a/packages/mistralai_azure/src/mistralai_azure/_version.py +++ b/packages/mistralai_azure/src/mistralai_azure/_version.py @@ -4,9 +4,9 @@ __title__: str = "mistralai_azure" __version__: str = "1.6.0" -__openapi_doc_version__: str = "0.0.2" -__gen_version__: str = "2.548.6" -__user_agent__: str = "speakeasy-sdk/python 1.6.0 2.548.6 0.0.2 mistralai_azure" +__openapi_doc_version__: str = "1.0.0" +__gen_version__: str = "2.634.2" +__user_agent__: str = "speakeasy-sdk/python 1.6.0 2.634.2 1.0.0 mistralai_azure" try: if __package__ is not None: diff --git a/packages/mistralai_azure/src/mistralai_azure/basesdk.py b/packages/mistralai_azure/src/mistralai_azure/basesdk.py index 24e4935e..84738ce8 100644 --- a/packages/mistralai_azure/src/mistralai_azure/basesdk.py +++ b/packages/mistralai_azure/src/mistralai_azure/basesdk.py @@ -218,12 +218,12 @@ def do_request( client = self.sdk_configuration.client logger = self.sdk_configuration.debug_logger + hooks = self.sdk_configuration.__dict__["_hooks"] + def do(): http_res = None try: - req = self.sdk_configuration.get_hooks().before_request( - BeforeRequestContext(hook_ctx), request - ) + req = hooks.before_request(BeforeRequestContext(hook_ctx), request) logger.debug( "Request:\nMethod: %s\nURL: %s\nHeaders: %s\nBody: %s", req.method, @@ -237,9 +237,7 @@ def do(): http_res = client.send(req, stream=stream) except Exception as e: - _, e = self.sdk_configuration.get_hooks().after_error( - AfterErrorContext(hook_ctx), None, e - ) + _, e = 
hooks.after_error(AfterErrorContext(hook_ctx), None, e) if e is not None: logger.debug("Request Exception", exc_info=True) raise e @@ -257,7 +255,7 @@ def do(): ) if utils.match_status_codes(error_status_codes, http_res.status_code): - result, err = self.sdk_configuration.get_hooks().after_error( + result, err = hooks.after_error( AfterErrorContext(hook_ctx), http_res, None ) if err is not None: @@ -277,9 +275,7 @@ def do(): http_res = do() if not utils.match_status_codes(error_status_codes, http_res.status_code): - http_res = self.sdk_configuration.get_hooks().after_success( - AfterSuccessContext(hook_ctx), http_res - ) + http_res = hooks.after_success(AfterSuccessContext(hook_ctx), http_res) return http_res @@ -294,12 +290,12 @@ async def do_request_async( client = self.sdk_configuration.async_client logger = self.sdk_configuration.debug_logger + hooks = self.sdk_configuration.__dict__["_hooks"] + async def do(): http_res = None try: - req = self.sdk_configuration.get_hooks().before_request( - BeforeRequestContext(hook_ctx), request - ) + req = hooks.before_request(BeforeRequestContext(hook_ctx), request) logger.debug( "Request:\nMethod: %s\nURL: %s\nHeaders: %s\nBody: %s", req.method, @@ -313,9 +309,7 @@ async def do(): http_res = await client.send(req, stream=stream) except Exception as e: - _, e = self.sdk_configuration.get_hooks().after_error( - AfterErrorContext(hook_ctx), None, e - ) + _, e = hooks.after_error(AfterErrorContext(hook_ctx), None, e) if e is not None: logger.debug("Request Exception", exc_info=True) raise e @@ -333,7 +327,7 @@ async def do(): ) if utils.match_status_codes(error_status_codes, http_res.status_code): - result, err = self.sdk_configuration.get_hooks().after_error( + result, err = hooks.after_error( AfterErrorContext(hook_ctx), http_res, None ) if err is not None: @@ -355,8 +349,6 @@ async def do(): http_res = await do() if not utils.match_status_codes(error_status_codes, http_res.status_code): - http_res = 
self.sdk_configuration.get_hooks().after_success( - AfterSuccessContext(hook_ctx), http_res - ) + http_res = hooks.after_success(AfterSuccessContext(hook_ctx), http_res) return http_res diff --git a/packages/mistralai_azure/src/mistralai_azure/chat.py b/packages/mistralai_azure/src/mistralai_azure/chat.py index cf3511fd..20184014 100644 --- a/packages/mistralai_azure/src/mistralai_azure/chat.py +++ b/packages/mistralai_azure/src/mistralai_azure/chat.py @@ -41,6 +41,7 @@ def stream( Union[models.Prediction, models.PredictionTypedDict] ] = None, parallel_tool_calls: Optional[bool] = None, + prompt_mode: OptionalNullable[models.MistralPromptMode] = UNSET, safe_prompt: Optional[bool] = None, retries: OptionalNullable[utils.RetryConfig] = UNSET, server_url: Optional[str] = None, @@ -67,6 +68,7 @@ def stream( :param n: Number of completions to return for each request, input tokens are only billed once. :param prediction: :param parallel_tool_calls: + :param prompt_mode: Allows toggling between the reasoning mode and no system prompt. When set to `reasoning` the system prompt for reasoning models will be used. :param safe_prompt: Whether to inject a safety prompt before all conversations. 
:param retries: Override the default retry configuration for this method :param server_url: Override the default server URL for this method @@ -106,6 +108,7 @@ def stream( prediction, Optional[models.Prediction] ), parallel_tool_calls=parallel_tool_calls, + prompt_mode=prompt_mode, safe_prompt=safe_prompt, ) @@ -138,6 +141,7 @@ def stream( http_res = self.do_request( hook_ctx=HookContext( + config=self.sdk_configuration, base_url=base_url or "", operation_id="stream_chat", oauth2_scopes=[], @@ -212,6 +216,7 @@ async def stream_async( Union[models.Prediction, models.PredictionTypedDict] ] = None, parallel_tool_calls: Optional[bool] = None, + prompt_mode: OptionalNullable[models.MistralPromptMode] = UNSET, safe_prompt: Optional[bool] = None, retries: OptionalNullable[utils.RetryConfig] = UNSET, server_url: Optional[str] = None, @@ -238,6 +243,7 @@ async def stream_async( :param n: Number of completions to return for each request, input tokens are only billed once. :param prediction: :param parallel_tool_calls: + :param prompt_mode: Allows toggling between the reasoning mode and no system prompt. When set to `reasoning` the system prompt for reasoning models will be used. :param safe_prompt: Whether to inject a safety prompt before all conversations. 
:param retries: Override the default retry configuration for this method :param server_url: Override the default server URL for this method @@ -277,6 +283,7 @@ async def stream_async( prediction, Optional[models.Prediction] ), parallel_tool_calls=parallel_tool_calls, + prompt_mode=prompt_mode, safe_prompt=safe_prompt, ) @@ -309,6 +316,7 @@ async def stream_async( http_res = await self.do_request_async( hook_ctx=HookContext( + config=self.sdk_configuration, base_url=base_url or "", operation_id="stream_chat", oauth2_scopes=[], @@ -391,6 +399,7 @@ def complete( Union[models.Prediction, models.PredictionTypedDict] ] = None, parallel_tool_calls: Optional[bool] = None, + prompt_mode: OptionalNullable[models.MistralPromptMode] = UNSET, safe_prompt: Optional[bool] = None, retries: OptionalNullable[utils.RetryConfig] = UNSET, server_url: Optional[str] = None, @@ -415,6 +424,7 @@ def complete( :param n: Number of completions to return for each request, input tokens are only billed once. :param prediction: :param parallel_tool_calls: + :param prompt_mode: Allows toggling between the reasoning mode and no system prompt. When set to `reasoning` the system prompt for reasoning models will be used. :param safe_prompt: Whether to inject a safety prompt before all conversations. 
:param retries: Override the default retry configuration for this method :param server_url: Override the default server URL for this method @@ -456,6 +466,7 @@ def complete( prediction, Optional[models.Prediction] ), parallel_tool_calls=parallel_tool_calls, + prompt_mode=prompt_mode, safe_prompt=safe_prompt, ) @@ -488,6 +499,7 @@ def complete( http_res = self.do_request( hook_ctx=HookContext( + config=self.sdk_configuration, base_url=base_url or "", operation_id="chat_completion_v1_chat_completions_post", oauth2_scopes=[], @@ -566,6 +578,7 @@ async def complete_async( Union[models.Prediction, models.PredictionTypedDict] ] = None, parallel_tool_calls: Optional[bool] = None, + prompt_mode: OptionalNullable[models.MistralPromptMode] = UNSET, safe_prompt: Optional[bool] = None, retries: OptionalNullable[utils.RetryConfig] = UNSET, server_url: Optional[str] = None, @@ -590,6 +603,7 @@ async def complete_async( :param n: Number of completions to return for each request, input tokens are only billed once. :param prediction: :param parallel_tool_calls: + :param prompt_mode: Allows toggling between the reasoning mode and no system prompt. When set to `reasoning` the system prompt for reasoning models will be used. :param safe_prompt: Whether to inject a safety prompt before all conversations. 
:param retries: Override the default retry configuration for this method :param server_url: Override the default server URL for this method @@ -631,6 +645,7 @@ async def complete_async( prediction, Optional[models.Prediction] ), parallel_tool_calls=parallel_tool_calls, + prompt_mode=prompt_mode, safe_prompt=safe_prompt, ) @@ -663,6 +678,7 @@ async def complete_async( http_res = await self.do_request_async( hook_ctx=HookContext( + config=self.sdk_configuration, base_url=base_url or "", operation_id="chat_completion_v1_chat_completions_post", oauth2_scopes=[], diff --git a/packages/mistralai_azure/src/mistralai_azure/httpclient.py b/packages/mistralai_azure/src/mistralai_azure/httpclient.py index 1e426352..47b052cb 100644 --- a/packages/mistralai_azure/src/mistralai_azure/httpclient.py +++ b/packages/mistralai_azure/src/mistralai_azure/httpclient.py @@ -2,7 +2,6 @@ # pyright: reportReturnType = false import asyncio -from concurrent.futures import ThreadPoolExecutor from typing_extensions import Protocol, runtime_checkable import httpx from typing import Any, Optional, Union @@ -116,21 +115,12 @@ def close_clients( pass if async_client is not None and not async_client_supplied: - is_async = False try: - asyncio.get_running_loop() - is_async = True + loop = asyncio.get_running_loop() + asyncio.run_coroutine_threadsafe(async_client.aclose(), loop) except RuntimeError: - pass - - try: - # If this function is called in an async loop then start another - # loop in a separate thread to close the async http client. 
- if is_async: - with ThreadPoolExecutor(max_workers=1) as executor: - future = executor.submit(asyncio.run, async_client.aclose()) - future.result() - else: + try: asyncio.run(async_client.aclose()) - except Exception: - pass + except RuntimeError: + # best effort + pass diff --git a/packages/mistralai_azure/src/mistralai_azure/models/__init__.py b/packages/mistralai_azure/src/mistralai_azure/models/__init__.py index 2229c469..bc1a3f4f 100644 --- a/packages/mistralai_azure/src/mistralai_azure/models/__init__.py +++ b/packages/mistralai_azure/src/mistralai_azure/models/__init__.py @@ -1,109 +1,134 @@ """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" -from .assistantmessage import ( - AssistantMessage, - AssistantMessageContent, - AssistantMessageContentTypedDict, - AssistantMessageRole, - AssistantMessageTypedDict, -) -from .chatcompletionchoice import ( - ChatCompletionChoice, - ChatCompletionChoiceFinishReason, - ChatCompletionChoiceTypedDict, -) -from .chatcompletionrequest import ( - ChatCompletionRequest, - ChatCompletionRequestMessages, - ChatCompletionRequestMessagesTypedDict, - ChatCompletionRequestStop, - ChatCompletionRequestStopTypedDict, - ChatCompletionRequestToolChoice, - ChatCompletionRequestToolChoiceTypedDict, - ChatCompletionRequestTypedDict, -) -from .chatcompletionresponse import ( - ChatCompletionResponse, - ChatCompletionResponseTypedDict, -) -from .chatcompletionstreamrequest import ( - ChatCompletionStreamRequest, - ChatCompletionStreamRequestToolChoice, - ChatCompletionStreamRequestToolChoiceTypedDict, - ChatCompletionStreamRequestTypedDict, - Messages, - MessagesTypedDict, - Stop, - StopTypedDict, -) -from .completionchunk import CompletionChunk, CompletionChunkTypedDict -from .completionevent import CompletionEvent, CompletionEventTypedDict -from .completionresponsestreamchoice import ( - CompletionResponseStreamChoice, - CompletionResponseStreamChoiceTypedDict, - FinishReason, -) -from .contentchunk import 
ContentChunk, ContentChunkTypedDict -from .deltamessage import Content, ContentTypedDict, DeltaMessage, DeltaMessageTypedDict -from .function import Function, FunctionTypedDict -from .functioncall import ( - Arguments, - ArgumentsTypedDict, - FunctionCall, - FunctionCallTypedDict, -) -from .functionname import FunctionName, FunctionNameTypedDict -from .httpvalidationerror import HTTPValidationError, HTTPValidationErrorData -from .imageurl import ImageURL, ImageURLTypedDict -from .imageurlchunk import ( - ImageURLChunk, - ImageURLChunkImageURL, - ImageURLChunkImageURLTypedDict, - ImageURLChunkType, - ImageURLChunkTypedDict, -) -from .jsonschema import JSONSchema, JSONSchemaTypedDict -from .prediction import Prediction, PredictionTypedDict -from .referencechunk import ReferenceChunk, ReferenceChunkType, ReferenceChunkTypedDict -from .responseformat import ResponseFormat, ResponseFormatTypedDict -from .responseformats import ResponseFormats -from .sdkerror import SDKError -from .security import Security, SecurityTypedDict -from .systemmessage import ( - Role, - SystemMessage, - SystemMessageContent, - SystemMessageContentTypedDict, - SystemMessageTypedDict, -) -from .textchunk import TextChunk, TextChunkTypedDict, Type -from .tool import Tool, ToolTypedDict -from .toolcall import ToolCall, ToolCallTypedDict -from .toolchoice import ToolChoice, ToolChoiceTypedDict -from .toolchoiceenum import ToolChoiceEnum -from .toolmessage import ( - ToolMessage, - ToolMessageContent, - ToolMessageContentTypedDict, - ToolMessageRole, - ToolMessageTypedDict, -) -from .tooltypes import ToolTypes -from .usageinfo import UsageInfo, UsageInfoTypedDict -from .usermessage import ( - UserMessage, - UserMessageContent, - UserMessageContentTypedDict, - UserMessageRole, - UserMessageTypedDict, -) -from .validationerror import ( - Loc, - LocTypedDict, - ValidationError, - ValidationErrorTypedDict, -) +from typing import TYPE_CHECKING +from importlib import import_module +if TYPE_CHECKING: + 
from .assistantmessage import ( + AssistantMessage, + AssistantMessageContent, + AssistantMessageContentTypedDict, + AssistantMessageRole, + AssistantMessageTypedDict, + ) + from .chatcompletionchoice import ( + ChatCompletionChoice, + ChatCompletionChoiceFinishReason, + ChatCompletionChoiceTypedDict, + ) + from .chatcompletionrequest import ( + ChatCompletionRequest, + ChatCompletionRequestMessages, + ChatCompletionRequestMessagesTypedDict, + ChatCompletionRequestStop, + ChatCompletionRequestStopTypedDict, + ChatCompletionRequestToolChoice, + ChatCompletionRequestToolChoiceTypedDict, + ChatCompletionRequestTypedDict, + ) + from .chatcompletionresponse import ( + ChatCompletionResponse, + ChatCompletionResponseTypedDict, + ) + from .chatcompletionstreamrequest import ( + ChatCompletionStreamRequest, + ChatCompletionStreamRequestToolChoice, + ChatCompletionStreamRequestToolChoiceTypedDict, + ChatCompletionStreamRequestTypedDict, + Messages, + MessagesTypedDict, + Stop, + StopTypedDict, + ) + from .completionchunk import CompletionChunk, CompletionChunkTypedDict + from .completionevent import CompletionEvent, CompletionEventTypedDict + from .completionresponsestreamchoice import ( + CompletionResponseStreamChoice, + CompletionResponseStreamChoiceTypedDict, + FinishReason, + ) + from .contentchunk import ContentChunk, ContentChunkTypedDict + from .deltamessage import ( + Content, + ContentTypedDict, + DeltaMessage, + DeltaMessageTypedDict, + ) + from .documenturlchunk import ( + DocumentURLChunk, + DocumentURLChunkType, + DocumentURLChunkTypedDict, + ) + from .filechunk import FileChunk, FileChunkTypedDict + from .function import Function, FunctionTypedDict + from .functioncall import ( + Arguments, + ArgumentsTypedDict, + FunctionCall, + FunctionCallTypedDict, + ) + from .functionname import FunctionName, FunctionNameTypedDict + from .httpvalidationerror import HTTPValidationError, HTTPValidationErrorData + from .imageurl import ImageURL, ImageURLTypedDict + from 
.imageurlchunk import ( + ImageURLChunk, + ImageURLChunkImageURL, + ImageURLChunkImageURLTypedDict, + ImageURLChunkType, + ImageURLChunkTypedDict, + ) + from .jsonschema import JSONSchema, JSONSchemaTypedDict + from .mistralpromptmode import MistralPromptMode + from .ocrimageobject import OCRImageObject, OCRImageObjectTypedDict + from .ocrpagedimensions import OCRPageDimensions, OCRPageDimensionsTypedDict + from .ocrpageobject import OCRPageObject, OCRPageObjectTypedDict + from .ocrrequest import Document, DocumentTypedDict, OCRRequest, OCRRequestTypedDict + from .ocrresponse import OCRResponse, OCRResponseTypedDict + from .ocrusageinfo import OCRUsageInfo, OCRUsageInfoTypedDict + from .prediction import Prediction, PredictionTypedDict + from .referencechunk import ( + ReferenceChunk, + ReferenceChunkType, + ReferenceChunkTypedDict, + ) + from .responseformat import ResponseFormat, ResponseFormatTypedDict + from .responseformats import ResponseFormats + from .sdkerror import SDKError + from .security import Security, SecurityTypedDict + from .systemmessage import ( + Role, + SystemMessage, + SystemMessageContent, + SystemMessageContentTypedDict, + SystemMessageTypedDict, + ) + from .textchunk import TextChunk, TextChunkTypedDict, Type + from .tool import Tool, ToolTypedDict + from .toolcall import ToolCall, ToolCallTypedDict + from .toolchoice import ToolChoice, ToolChoiceTypedDict + from .toolchoiceenum import ToolChoiceEnum + from .toolmessage import ( + ToolMessage, + ToolMessageContent, + ToolMessageContentTypedDict, + ToolMessageRole, + ToolMessageTypedDict, + ) + from .tooltypes import ToolTypes + from .usageinfo import UsageInfo, UsageInfoTypedDict + from .usermessage import ( + UserMessage, + UserMessageContent, + UserMessageContentTypedDict, + UserMessageRole, + UserMessageTypedDict, + ) + from .validationerror import ( + Loc, + LocTypedDict, + ValidationError, + ValidationErrorTypedDict, + ) __all__ = [ "Arguments", @@ -142,6 +167,13 @@ 
"ContentTypedDict", "DeltaMessage", "DeltaMessageTypedDict", + "Document", + "DocumentTypedDict", + "DocumentURLChunk", + "DocumentURLChunkType", + "DocumentURLChunkTypedDict", + "FileChunk", + "FileChunkTypedDict", "FinishReason", "Function", "FunctionCall", @@ -164,6 +196,19 @@ "LocTypedDict", "Messages", "MessagesTypedDict", + "MistralPromptMode", + "OCRImageObject", + "OCRImageObjectTypedDict", + "OCRPageDimensions", + "OCRPageDimensionsTypedDict", + "OCRPageObject", + "OCRPageObjectTypedDict", + "OCRRequest", + "OCRRequestTypedDict", + "OCRResponse", + "OCRResponseTypedDict", + "OCRUsageInfo", + "OCRUsageInfoTypedDict", "Prediction", "PredictionTypedDict", "ReferenceChunk", @@ -208,3 +253,153 @@ "ValidationError", "ValidationErrorTypedDict", ] + +_dynamic_imports: dict[str, str] = { + "AssistantMessage": ".assistantmessage", + "AssistantMessageContent": ".assistantmessage", + "AssistantMessageContentTypedDict": ".assistantmessage", + "AssistantMessageRole": ".assistantmessage", + "AssistantMessageTypedDict": ".assistantmessage", + "ChatCompletionChoice": ".chatcompletionchoice", + "ChatCompletionChoiceFinishReason": ".chatcompletionchoice", + "ChatCompletionChoiceTypedDict": ".chatcompletionchoice", + "ChatCompletionRequest": ".chatcompletionrequest", + "ChatCompletionRequestMessages": ".chatcompletionrequest", + "ChatCompletionRequestMessagesTypedDict": ".chatcompletionrequest", + "ChatCompletionRequestStop": ".chatcompletionrequest", + "ChatCompletionRequestStopTypedDict": ".chatcompletionrequest", + "ChatCompletionRequestToolChoice": ".chatcompletionrequest", + "ChatCompletionRequestToolChoiceTypedDict": ".chatcompletionrequest", + "ChatCompletionRequestTypedDict": ".chatcompletionrequest", + "ChatCompletionResponse": ".chatcompletionresponse", + "ChatCompletionResponseTypedDict": ".chatcompletionresponse", + "ChatCompletionStreamRequest": ".chatcompletionstreamrequest", + "ChatCompletionStreamRequestToolChoice": ".chatcompletionstreamrequest", + 
"ChatCompletionStreamRequestToolChoiceTypedDict": ".chatcompletionstreamrequest", + "ChatCompletionStreamRequestTypedDict": ".chatcompletionstreamrequest", + "Messages": ".chatcompletionstreamrequest", + "MessagesTypedDict": ".chatcompletionstreamrequest", + "Stop": ".chatcompletionstreamrequest", + "StopTypedDict": ".chatcompletionstreamrequest", + "CompletionChunk": ".completionchunk", + "CompletionChunkTypedDict": ".completionchunk", + "CompletionEvent": ".completionevent", + "CompletionEventTypedDict": ".completionevent", + "CompletionResponseStreamChoice": ".completionresponsestreamchoice", + "CompletionResponseStreamChoiceTypedDict": ".completionresponsestreamchoice", + "FinishReason": ".completionresponsestreamchoice", + "ContentChunk": ".contentchunk", + "ContentChunkTypedDict": ".contentchunk", + "Content": ".deltamessage", + "ContentTypedDict": ".deltamessage", + "DeltaMessage": ".deltamessage", + "DeltaMessageTypedDict": ".deltamessage", + "DocumentURLChunk": ".documenturlchunk", + "DocumentURLChunkType": ".documenturlchunk", + "DocumentURLChunkTypedDict": ".documenturlchunk", + "FileChunk": ".filechunk", + "FileChunkTypedDict": ".filechunk", + "Function": ".function", + "FunctionTypedDict": ".function", + "Arguments": ".functioncall", + "ArgumentsTypedDict": ".functioncall", + "FunctionCall": ".functioncall", + "FunctionCallTypedDict": ".functioncall", + "FunctionName": ".functionname", + "FunctionNameTypedDict": ".functionname", + "HTTPValidationError": ".httpvalidationerror", + "HTTPValidationErrorData": ".httpvalidationerror", + "ImageURL": ".imageurl", + "ImageURLTypedDict": ".imageurl", + "ImageURLChunk": ".imageurlchunk", + "ImageURLChunkImageURL": ".imageurlchunk", + "ImageURLChunkImageURLTypedDict": ".imageurlchunk", + "ImageURLChunkType": ".imageurlchunk", + "ImageURLChunkTypedDict": ".imageurlchunk", + "JSONSchema": ".jsonschema", + "JSONSchemaTypedDict": ".jsonschema", + "MistralPromptMode": ".mistralpromptmode", + "OCRImageObject": 
".ocrimageobject", + "OCRImageObjectTypedDict": ".ocrimageobject", + "OCRPageDimensions": ".ocrpagedimensions", + "OCRPageDimensionsTypedDict": ".ocrpagedimensions", + "OCRPageObject": ".ocrpageobject", + "OCRPageObjectTypedDict": ".ocrpageobject", + "Document": ".ocrrequest", + "DocumentTypedDict": ".ocrrequest", + "OCRRequest": ".ocrrequest", + "OCRRequestTypedDict": ".ocrrequest", + "OCRResponse": ".ocrresponse", + "OCRResponseTypedDict": ".ocrresponse", + "OCRUsageInfo": ".ocrusageinfo", + "OCRUsageInfoTypedDict": ".ocrusageinfo", + "Prediction": ".prediction", + "PredictionTypedDict": ".prediction", + "ReferenceChunk": ".referencechunk", + "ReferenceChunkType": ".referencechunk", + "ReferenceChunkTypedDict": ".referencechunk", + "ResponseFormat": ".responseformat", + "ResponseFormatTypedDict": ".responseformat", + "ResponseFormats": ".responseformats", + "SDKError": ".sdkerror", + "Security": ".security", + "SecurityTypedDict": ".security", + "Role": ".systemmessage", + "SystemMessage": ".systemmessage", + "SystemMessageContent": ".systemmessage", + "SystemMessageContentTypedDict": ".systemmessage", + "SystemMessageTypedDict": ".systemmessage", + "TextChunk": ".textchunk", + "TextChunkTypedDict": ".textchunk", + "Type": ".textchunk", + "Tool": ".tool", + "ToolTypedDict": ".tool", + "ToolCall": ".toolcall", + "ToolCallTypedDict": ".toolcall", + "ToolChoice": ".toolchoice", + "ToolChoiceTypedDict": ".toolchoice", + "ToolChoiceEnum": ".toolchoiceenum", + "ToolMessage": ".toolmessage", + "ToolMessageContent": ".toolmessage", + "ToolMessageContentTypedDict": ".toolmessage", + "ToolMessageRole": ".toolmessage", + "ToolMessageTypedDict": ".toolmessage", + "ToolTypes": ".tooltypes", + "UsageInfo": ".usageinfo", + "UsageInfoTypedDict": ".usageinfo", + "UserMessage": ".usermessage", + "UserMessageContent": ".usermessage", + "UserMessageContentTypedDict": ".usermessage", + "UserMessageRole": ".usermessage", + "UserMessageTypedDict": ".usermessage", + "Loc": 
".validationerror", + "LocTypedDict": ".validationerror", + "ValidationError": ".validationerror", + "ValidationErrorTypedDict": ".validationerror", +} + + +def __getattr__(attr_name: str) -> object: + module_name = _dynamic_imports.get(attr_name) + if module_name is None: + raise AttributeError( + f"No {attr_name} found in _dynamic_imports for module name -> {__name__} " + ) + + try: + module = import_module(module_name, __package__) + result = getattr(module, attr_name) + return result + except ImportError as e: + raise ImportError( + f"Failed to import {attr_name} from {module_name}: {e}" + ) from e + except AttributeError as e: + raise AttributeError( + f"Failed to get {attr_name} from {module_name}: {e}" + ) from e + + +def __dir__(): + lazy_attrs = list(_dynamic_imports.keys()) + return sorted(lazy_attrs) diff --git a/packages/mistralai_azure/src/mistralai_azure/models/assistantmessage.py b/packages/mistralai_azure/src/mistralai_azure/models/assistantmessage.py index 530b33df..86f5ec09 100644 --- a/packages/mistralai_azure/src/mistralai_azure/models/assistantmessage.py +++ b/packages/mistralai_azure/src/mistralai_azure/models/assistantmessage.py @@ -56,7 +56,7 @@ def serialize_model(self, handler): m = {} - for n, f in self.model_fields.items(): + for n, f in type(self).model_fields.items(): k = f.alias or n val = serialized.get(k) serialized.pop(k, None) diff --git a/packages/mistralai_azure/src/mistralai_azure/models/chatcompletionrequest.py b/packages/mistralai_azure/src/mistralai_azure/models/chatcompletionrequest.py index f48c1f50..8dffe1bd 100644 --- a/packages/mistralai_azure/src/mistralai_azure/models/chatcompletionrequest.py +++ b/packages/mistralai_azure/src/mistralai_azure/models/chatcompletionrequest.py @@ -2,6 +2,7 @@ from __future__ import annotations from .assistantmessage import AssistantMessage, AssistantMessageTypedDict +from .mistralpromptmode import MistralPromptMode from .prediction import Prediction, PredictionTypedDict from 
.responseformat import ResponseFormat, ResponseFormatTypedDict from .systemmessage import SystemMessage, SystemMessageTypedDict @@ -17,8 +18,9 @@ UNSET, UNSET_SENTINEL, ) -from mistralai_azure.utils import get_discriminator +from mistralai_azure.utils import get_discriminator, validate_open_enum from pydantic import Discriminator, Tag, model_serializer +from pydantic.functional_validators import PlainValidator from typing import List, Optional, Union from typing_extensions import Annotated, NotRequired, TypeAliasType, TypedDict @@ -96,6 +98,8 @@ class ChatCompletionRequestTypedDict(TypedDict): r"""Number of completions to return for each request, input tokens are only billed once.""" prediction: NotRequired[PredictionTypedDict] parallel_tool_calls: NotRequired[bool] + prompt_mode: NotRequired[Nullable[MistralPromptMode]] + r"""Allows toggling between the reasoning mode and no system prompt. When set to `reasoning` the system prompt for reasoning models will be used.""" safe_prompt: NotRequired[bool] r"""Whether to inject a safety prompt before all conversations.""" @@ -144,6 +148,11 @@ class ChatCompletionRequest(BaseModel): parallel_tool_calls: Optional[bool] = None + prompt_mode: Annotated[ + OptionalNullable[MistralPromptMode], PlainValidator(validate_open_enum(False)) + ] = UNSET + r"""Allows toggling between the reasoning mode and no system prompt. 
When set to `reasoning` the system prompt for reasoning models will be used.""" + safe_prompt: Optional[bool] = None r"""Whether to inject a safety prompt before all conversations.""" @@ -165,16 +174,24 @@ def serialize_model(self, handler): "n", "prediction", "parallel_tool_calls", + "prompt_mode", "safe_prompt", ] - nullable_fields = ["temperature", "max_tokens", "random_seed", "tools", "n"] + nullable_fields = [ + "temperature", + "max_tokens", + "random_seed", + "tools", + "n", + "prompt_mode", + ] null_default_fields = [] serialized = handler(self) m = {} - for n, f in self.model_fields.items(): + for n, f in type(self).model_fields.items(): k = f.alias or n val = serialized.get(k) serialized.pop(k, None) diff --git a/packages/mistralai_azure/src/mistralai_azure/models/chatcompletionresponse.py b/packages/mistralai_azure/src/mistralai_azure/models/chatcompletionresponse.py index ecd85d5c..7a66f322 100644 --- a/packages/mistralai_azure/src/mistralai_azure/models/chatcompletionresponse.py +++ b/packages/mistralai_azure/src/mistralai_azure/models/chatcompletionresponse.py @@ -4,8 +4,8 @@ from .chatcompletionchoice import ChatCompletionChoice, ChatCompletionChoiceTypedDict from .usageinfo import UsageInfo, UsageInfoTypedDict from mistralai_azure.types import BaseModel -from typing import List, Optional -from typing_extensions import NotRequired, TypedDict +from typing import List +from typing_extensions import TypedDict class ChatCompletionResponseTypedDict(TypedDict): @@ -13,8 +13,8 @@ class ChatCompletionResponseTypedDict(TypedDict): object: str model: str usage: UsageInfoTypedDict - created: NotRequired[int] - choices: NotRequired[List[ChatCompletionChoiceTypedDict]] + created: int + choices: List[ChatCompletionChoiceTypedDict] class ChatCompletionResponse(BaseModel): @@ -26,6 +26,6 @@ class ChatCompletionResponse(BaseModel): usage: UsageInfo - created: Optional[int] = None + created: int - choices: Optional[List[ChatCompletionChoice]] = None + choices: 
List[ChatCompletionChoice] diff --git a/packages/mistralai_azure/src/mistralai_azure/models/chatcompletionstreamrequest.py b/packages/mistralai_azure/src/mistralai_azure/models/chatcompletionstreamrequest.py index 50cf1f01..5fced93e 100644 --- a/packages/mistralai_azure/src/mistralai_azure/models/chatcompletionstreamrequest.py +++ b/packages/mistralai_azure/src/mistralai_azure/models/chatcompletionstreamrequest.py @@ -2,6 +2,7 @@ from __future__ import annotations from .assistantmessage import AssistantMessage, AssistantMessageTypedDict +from .mistralpromptmode import MistralPromptMode from .prediction import Prediction, PredictionTypedDict from .responseformat import ResponseFormat, ResponseFormatTypedDict from .systemmessage import SystemMessage, SystemMessageTypedDict @@ -17,8 +18,9 @@ UNSET, UNSET_SENTINEL, ) -from mistralai_azure.utils import get_discriminator +from mistralai_azure.utils import get_discriminator, validate_open_enum from pydantic import Discriminator, Tag, model_serializer +from pydantic.functional_validators import PlainValidator from typing import List, Optional, Union from typing_extensions import Annotated, NotRequired, TypeAliasType, TypedDict @@ -91,6 +93,8 @@ class ChatCompletionStreamRequestTypedDict(TypedDict): r"""Number of completions to return for each request, input tokens are only billed once.""" prediction: NotRequired[PredictionTypedDict] parallel_tool_calls: NotRequired[bool] + prompt_mode: NotRequired[Nullable[MistralPromptMode]] + r"""Allows toggling between the reasoning mode and no system prompt. 
When set to `reasoning` the system prompt for reasoning models will be used.""" safe_prompt: NotRequired[bool] r"""Whether to inject a safety prompt before all conversations.""" @@ -138,6 +142,11 @@ class ChatCompletionStreamRequest(BaseModel): parallel_tool_calls: Optional[bool] = None + prompt_mode: Annotated[ + OptionalNullable[MistralPromptMode], PlainValidator(validate_open_enum(False)) + ] = UNSET + r"""Allows toggling between the reasoning mode and no system prompt. When set to `reasoning` the system prompt for reasoning models will be used.""" + safe_prompt: Optional[bool] = None r"""Whether to inject a safety prompt before all conversations.""" @@ -159,16 +168,24 @@ def serialize_model(self, handler): "n", "prediction", "parallel_tool_calls", + "prompt_mode", "safe_prompt", ] - nullable_fields = ["temperature", "max_tokens", "random_seed", "tools", "n"] + nullable_fields = [ + "temperature", + "max_tokens", + "random_seed", + "tools", + "n", + "prompt_mode", + ] null_default_fields = [] serialized = handler(self) m = {} - for n, f in self.model_fields.items(): + for n, f in type(self).model_fields.items(): k = f.alias or n val = serialized.get(k) serialized.pop(k, None) diff --git a/packages/mistralai_azure/src/mistralai_azure/models/completionresponsestreamchoice.py b/packages/mistralai_azure/src/mistralai_azure/models/completionresponsestreamchoice.py index 37294d9b..1a492204 100644 --- a/packages/mistralai_azure/src/mistralai_azure/models/completionresponsestreamchoice.py +++ b/packages/mistralai_azure/src/mistralai_azure/models/completionresponsestreamchoice.py @@ -38,7 +38,7 @@ def serialize_model(self, handler): m = {} - for n, f in self.model_fields.items(): + for n, f in type(self).model_fields.items(): k = f.alias or n val = serialized.get(k) serialized.pop(k, None) diff --git a/packages/mistralai_azure/src/mistralai_azure/models/deltamessage.py b/packages/mistralai_azure/src/mistralai_azure/models/deltamessage.py index 112eb127..7fa3c3f2 100644 
--- a/packages/mistralai_azure/src/mistralai_azure/models/deltamessage.py +++ b/packages/mistralai_azure/src/mistralai_azure/models/deltamessage.py @@ -46,7 +46,7 @@ def serialize_model(self, handler): m = {} - for n, f in self.model_fields.items(): + for n, f in type(self).model_fields.items(): k = f.alias or n val = serialized.get(k) serialized.pop(k, None) diff --git a/packages/mistralai_azure/src/mistralai_azure/models/documenturlchunk.py b/packages/mistralai_azure/src/mistralai_azure/models/documenturlchunk.py new file mode 100644 index 00000000..23ff71a6 --- /dev/null +++ b/packages/mistralai_azure/src/mistralai_azure/models/documenturlchunk.py @@ -0,0 +1,62 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +from mistralai_azure.types import ( + BaseModel, + Nullable, + OptionalNullable, + UNSET, + UNSET_SENTINEL, +) +from pydantic import model_serializer +from typing import Literal, Optional +from typing_extensions import NotRequired, TypedDict + + +DocumentURLChunkType = Literal["document_url"] + + +class DocumentURLChunkTypedDict(TypedDict): + document_url: str + document_name: NotRequired[Nullable[str]] + r"""The filename of the document""" + type: NotRequired[DocumentURLChunkType] + + +class DocumentURLChunk(BaseModel): + document_url: str + + document_name: OptionalNullable[str] = UNSET + r"""The filename of the document""" + + type: Optional[DocumentURLChunkType] = "document_url" + + @model_serializer(mode="wrap") + def serialize_model(self, handler): + optional_fields = ["document_name", "type"] + nullable_fields = ["document_name"] + null_default_fields = [] + + serialized = handler(self) + + m = {} + + for n, f in type(self).model_fields.items(): + k = f.alias or n + val = serialized.get(k) + serialized.pop(k, None) + + optional_nullable = k in optional_fields and k in nullable_fields + is_set = ( + self.__pydantic_fields_set__.intersection({n}) + or k in null_default_fields + ) # 
pylint: disable=no-member + + if val is not None and val != UNSET_SENTINEL: + m[k] = val + elif val != UNSET_SENTINEL and ( + not k in optional_fields or (optional_nullable and is_set) + ): + m[k] = val + + return m diff --git a/packages/mistralai_azure/src/mistralai_azure/models/filechunk.py b/packages/mistralai_azure/src/mistralai_azure/models/filechunk.py new file mode 100644 index 00000000..2c3edc07 --- /dev/null +++ b/packages/mistralai_azure/src/mistralai_azure/models/filechunk.py @@ -0,0 +1,23 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +from mistralai_azure.types import BaseModel +from mistralai_azure.utils import validate_const +import pydantic +from pydantic.functional_validators import AfterValidator +from typing import Literal, Optional +from typing_extensions import Annotated, TypedDict + + +class FileChunkTypedDict(TypedDict): + file_id: str + type: Literal["file"] + + +class FileChunk(BaseModel): + file_id: str + + TYPE: Annotated[ + Annotated[Optional[Literal["file"]], AfterValidator(validate_const("file"))], + pydantic.Field(alias="type"), + ] = "file" diff --git a/packages/mistralai_azure/src/mistralai_azure/models/imageurl.py b/packages/mistralai_azure/src/mistralai_azure/models/imageurl.py index 8faa272b..a5a66360 100644 --- a/packages/mistralai_azure/src/mistralai_azure/models/imageurl.py +++ b/packages/mistralai_azure/src/mistralai_azure/models/imageurl.py @@ -32,7 +32,7 @@ def serialize_model(self, handler): m = {} - for n, f in self.model_fields.items(): + for n, f in type(self).model_fields.items(): k = f.alias or n val = serialized.get(k) serialized.pop(k, None) diff --git a/packages/mistralai_azure/src/mistralai_azure/models/jsonschema.py b/packages/mistralai_azure/src/mistralai_azure/models/jsonschema.py index b2d07d3a..0f7563fc 100644 --- a/packages/mistralai_azure/src/mistralai_azure/models/jsonschema.py +++ 
b/packages/mistralai_azure/src/mistralai_azure/models/jsonschema.py @@ -40,7 +40,7 @@ def serialize_model(self, handler): m = {} - for n, f in self.model_fields.items(): + for n, f in type(self).model_fields.items(): k = f.alias or n val = serialized.get(k) serialized.pop(k, None) diff --git a/packages/mistralai_azure/src/mistralai_azure/models/mistralpromptmode.py b/packages/mistralai_azure/src/mistralai_azure/models/mistralpromptmode.py new file mode 100644 index 00000000..bd4584a5 --- /dev/null +++ b/packages/mistralai_azure/src/mistralai_azure/models/mistralpromptmode.py @@ -0,0 +1,8 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +from mistralai_azure.types import UnrecognizedStr +from typing import Literal, Union + + +MistralPromptMode = Union[Literal["reasoning"], UnrecognizedStr] diff --git a/packages/mistralai_azure/src/mistralai_azure/models/ocrimageobject.py b/packages/mistralai_azure/src/mistralai_azure/models/ocrimageobject.py new file mode 100644 index 00000000..9d0dd01d --- /dev/null +++ b/packages/mistralai_azure/src/mistralai_azure/models/ocrimageobject.py @@ -0,0 +1,89 @@ +"""Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" + +from __future__ import annotations +from mistralai_azure.types import ( + BaseModel, + Nullable, + OptionalNullable, + UNSET, + UNSET_SENTINEL, +) +from pydantic import model_serializer +from typing_extensions import NotRequired, TypedDict + + +class OCRImageObjectTypedDict(TypedDict): + id: str + r"""Image ID for extracted image in a page""" + top_left_x: Nullable[int] + r"""X coordinate of top-left corner of the extracted image""" + top_left_y: Nullable[int] + r"""Y coordinate of top-left corner of the extracted image""" + bottom_right_x: Nullable[int] + r"""X coordinate of bottom-right corner of the extracted image""" + bottom_right_y: Nullable[int] + r"""Y coordinate of bottom-right corner of the extracted image""" + image_base64: NotRequired[Nullable[str]] + r"""Base64 string of the extracted image""" + image_annotation: NotRequired[Nullable[str]] + r"""Annotation of the extracted image in json str""" + + +class OCRImageObject(BaseModel): + id: str + r"""Image ID for extracted image in a page""" + + top_left_x: Nullable[int] + r"""X coordinate of top-left corner of the extracted image""" + + top_left_y: Nullable[int] + r"""Y coordinate of top-left corner of the extracted image""" + + bottom_right_x: Nullable[int] + r"""X coordinate of bottom-right corner of the extracted image""" + + bottom_right_y: Nullable[int] + r"""Y coordinate of bottom-right corner of the extracted image""" + + image_base64: OptionalNullable[str] = UNSET + r"""Base64 string of the extracted image""" + + image_annotation: OptionalNullable[str] = UNSET + r"""Annotation of the extracted image in json str""" + + @model_serializer(mode="wrap") + def serialize_model(self, handler): + optional_fields = ["image_base64", "image_annotation"] + nullable_fields = [ + "top_left_x", + "top_left_y", + "bottom_right_x", + "bottom_right_y", + "image_base64", + "image_annotation", + ] + null_default_fields = [] + + serialized = handler(self) + + m = {} + + for n, f in 
type(self).model_fields.items(): + k = f.alias or n + val = serialized.get(k) + serialized.pop(k, None) + + optional_nullable = k in optional_fields and k in nullable_fields + is_set = ( + self.__pydantic_fields_set__.intersection({n}) + or k in null_default_fields + ) # pylint: disable=no-member + + if val is not None and val != UNSET_SENTINEL: + m[k] = val + elif val != UNSET_SENTINEL and ( + not k in optional_fields or (optional_nullable and is_set) + ): + m[k] = val + + return m diff --git a/packages/mistralai_azure/src/mistralai_azure/models/ocrpagedimensions.py b/packages/mistralai_azure/src/mistralai_azure/models/ocrpagedimensions.py new file mode 100644 index 00000000..efb62a58 --- /dev/null +++ b/packages/mistralai_azure/src/mistralai_azure/models/ocrpagedimensions.py @@ -0,0 +1,25 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +from mistralai_azure.types import BaseModel +from typing_extensions import TypedDict + + +class OCRPageDimensionsTypedDict(TypedDict): + dpi: int + r"""Dots per inch of the page-image""" + height: int + r"""Height of the image in pixels""" + width: int + r"""Width of the image in pixels""" + + +class OCRPageDimensions(BaseModel): + dpi: int + r"""Dots per inch of the page-image""" + + height: int + r"""Height of the image in pixels""" + + width: int + r"""Width of the image in pixels""" diff --git a/packages/mistralai_azure/src/mistralai_azure/models/ocrpageobject.py b/packages/mistralai_azure/src/mistralai_azure/models/ocrpageobject.py new file mode 100644 index 00000000..4438e732 --- /dev/null +++ b/packages/mistralai_azure/src/mistralai_azure/models/ocrpageobject.py @@ -0,0 +1,64 @@ +"""Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" + +from __future__ import annotations +from .ocrimageobject import OCRImageObject, OCRImageObjectTypedDict +from .ocrpagedimensions import OCRPageDimensions, OCRPageDimensionsTypedDict +from mistralai_azure.types import BaseModel, Nullable, UNSET_SENTINEL +from pydantic import model_serializer +from typing import List +from typing_extensions import TypedDict + + +class OCRPageObjectTypedDict(TypedDict): + index: int + r"""The page index in a pdf document starting from 0""" + markdown: str + r"""The markdown string response of the page""" + images: List[OCRImageObjectTypedDict] + r"""List of all extracted images in the page""" + dimensions: Nullable[OCRPageDimensionsTypedDict] + r"""The dimensions of the PDF Page's screenshot image""" + + +class OCRPageObject(BaseModel): + index: int + r"""The page index in a pdf document starting from 0""" + + markdown: str + r"""The markdown string response of the page""" + + images: List[OCRImageObject] + r"""List of all extracted images in the page""" + + dimensions: Nullable[OCRPageDimensions] + r"""The dimensions of the PDF Page's screenshot image""" + + @model_serializer(mode="wrap") + def serialize_model(self, handler): + optional_fields = [] + nullable_fields = ["dimensions"] + null_default_fields = [] + + serialized = handler(self) + + m = {} + + for n, f in type(self).model_fields.items(): + k = f.alias or n + val = serialized.get(k) + serialized.pop(k, None) + + optional_nullable = k in optional_fields and k in nullable_fields + is_set = ( + self.__pydantic_fields_set__.intersection({n}) + or k in null_default_fields + ) # pylint: disable=no-member + + if val is not None and val != UNSET_SENTINEL: + m[k] = val + elif val != UNSET_SENTINEL and ( + not k in optional_fields or (optional_nullable and is_set) + ): + m[k] = val + + return m diff --git a/packages/mistralai_azure/src/mistralai_azure/models/ocrrequest.py b/packages/mistralai_azure/src/mistralai_azure/models/ocrrequest.py new file mode 100644 index 
00000000..533d0742 --- /dev/null +++ b/packages/mistralai_azure/src/mistralai_azure/models/ocrrequest.py @@ -0,0 +1,120 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +from .documenturlchunk import DocumentURLChunk, DocumentURLChunkTypedDict +from .filechunk import FileChunk, FileChunkTypedDict +from .imageurlchunk import ImageURLChunk, ImageURLChunkTypedDict +from .responseformat import ResponseFormat, ResponseFormatTypedDict +from mistralai_azure.types import ( + BaseModel, + Nullable, + OptionalNullable, + UNSET, + UNSET_SENTINEL, +) +from pydantic import model_serializer +from typing import List, Optional, Union +from typing_extensions import NotRequired, TypeAliasType, TypedDict + + +DocumentTypedDict = TypeAliasType( + "DocumentTypedDict", + Union[FileChunkTypedDict, ImageURLChunkTypedDict, DocumentURLChunkTypedDict], +) +r"""Document to run OCR on""" + + +Document = TypeAliasType("Document", Union[FileChunk, ImageURLChunk, DocumentURLChunk]) +r"""Document to run OCR on""" + + +class OCRRequestTypedDict(TypedDict): + model: Nullable[str] + document: DocumentTypedDict + r"""Document to run OCR on""" + id: NotRequired[str] + pages: NotRequired[Nullable[List[int]]] + r"""Specific pages user wants to process in various formats: single number, range, or list of both. Starts from 0""" + include_image_base64: NotRequired[Nullable[bool]] + r"""Include image URLs in response""" + image_limit: NotRequired[Nullable[int]] + r"""Max images to extract""" + image_min_size: NotRequired[Nullable[int]] + r"""Minimum height and width of image to extract""" + bbox_annotation_format: NotRequired[Nullable[ResponseFormatTypedDict]] + r"""Structured output class for extracting useful information from each extracted bounding box / image from document. 
Only json_schema is valid for this field""" + document_annotation_format: NotRequired[Nullable[ResponseFormatTypedDict]] + r"""Structured output class for extracting useful information from the entire document. Only json_schema is valid for this field""" + + +class OCRRequest(BaseModel): + model: Nullable[str] + + document: Document + r"""Document to run OCR on""" + + id: Optional[str] = None + + pages: OptionalNullable[List[int]] = UNSET + r"""Specific pages user wants to process in various formats: single number, range, or list of both. Starts from 0""" + + include_image_base64: OptionalNullable[bool] = UNSET + r"""Include image URLs in response""" + + image_limit: OptionalNullable[int] = UNSET + r"""Max images to extract""" + + image_min_size: OptionalNullable[int] = UNSET + r"""Minimum height and width of image to extract""" + + bbox_annotation_format: OptionalNullable[ResponseFormat] = UNSET + r"""Structured output class for extracting useful information from each extracted bounding box / image from document. Only json_schema is valid for this field""" + + document_annotation_format: OptionalNullable[ResponseFormat] = UNSET + r"""Structured output class for extracting useful information from the entire document. 
Only json_schema is valid for this field""" + + @model_serializer(mode="wrap") + def serialize_model(self, handler): + optional_fields = [ + "id", + "pages", + "include_image_base64", + "image_limit", + "image_min_size", + "bbox_annotation_format", + "document_annotation_format", + ] + nullable_fields = [ + "model", + "pages", + "include_image_base64", + "image_limit", + "image_min_size", + "bbox_annotation_format", + "document_annotation_format", + ] + null_default_fields = [] + + serialized = handler(self) + + m = {} + + for n, f in type(self).model_fields.items(): + k = f.alias or n + val = serialized.get(k) + serialized.pop(k, None) + + optional_nullable = k in optional_fields and k in nullable_fields + is_set = ( + self.__pydantic_fields_set__.intersection({n}) + or k in null_default_fields + ) # pylint: disable=no-member + + if val is not None and val != UNSET_SENTINEL: + m[k] = val + elif val != UNSET_SENTINEL and ( + not k in optional_fields or (optional_nullable and is_set) + ): + m[k] = val + + return m diff --git a/packages/mistralai_azure/src/mistralai_azure/models/ocrresponse.py b/packages/mistralai_azure/src/mistralai_azure/models/ocrresponse.py new file mode 100644 index 00000000..3e43fa8e --- /dev/null +++ b/packages/mistralai_azure/src/mistralai_azure/models/ocrresponse.py @@ -0,0 +1,68 @@ +"""Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" + +from __future__ import annotations +from .ocrpageobject import OCRPageObject, OCRPageObjectTypedDict +from .ocrusageinfo import OCRUsageInfo, OCRUsageInfoTypedDict +from mistralai_azure.types import ( + BaseModel, + Nullable, + OptionalNullable, + UNSET, + UNSET_SENTINEL, +) +from pydantic import model_serializer +from typing import List +from typing_extensions import NotRequired, TypedDict + + +class OCRResponseTypedDict(TypedDict): + pages: List[OCRPageObjectTypedDict] + r"""List of OCR info for pages.""" + model: str + r"""The model used to generate the OCR.""" + usage_info: OCRUsageInfoTypedDict + document_annotation: NotRequired[Nullable[str]] + r"""Formatted response in the request_format if provided in json str""" + + +class OCRResponse(BaseModel): + pages: List[OCRPageObject] + r"""List of OCR info for pages.""" + + model: str + r"""The model used to generate the OCR.""" + + usage_info: OCRUsageInfo + + document_annotation: OptionalNullable[str] = UNSET + r"""Formatted response in the request_format if provided in json str""" + + @model_serializer(mode="wrap") + def serialize_model(self, handler): + optional_fields = ["document_annotation"] + nullable_fields = ["document_annotation"] + null_default_fields = [] + + serialized = handler(self) + + m = {} + + for n, f in type(self).model_fields.items(): + k = f.alias or n + val = serialized.get(k) + serialized.pop(k, None) + + optional_nullable = k in optional_fields and k in nullable_fields + is_set = ( + self.__pydantic_fields_set__.intersection({n}) + or k in null_default_fields + ) # pylint: disable=no-member + + if val is not None and val != UNSET_SENTINEL: + m[k] = val + elif val != UNSET_SENTINEL and ( + not k in optional_fields or (optional_nullable and is_set) + ): + m[k] = val + + return m diff --git a/packages/mistralai_azure/src/mistralai_azure/models/ocrusageinfo.py b/packages/mistralai_azure/src/mistralai_azure/models/ocrusageinfo.py new file mode 100644 index 00000000..1f5c9f1b 
--- /dev/null +++ b/packages/mistralai_azure/src/mistralai_azure/models/ocrusageinfo.py @@ -0,0 +1,57 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +from mistralai_azure.types import ( + BaseModel, + Nullable, + OptionalNullable, + UNSET, + UNSET_SENTINEL, +) +from pydantic import model_serializer +from typing_extensions import NotRequired, TypedDict + + +class OCRUsageInfoTypedDict(TypedDict): + pages_processed: int + r"""Number of pages processed""" + doc_size_bytes: NotRequired[Nullable[int]] + r"""Document size in bytes""" + + +class OCRUsageInfo(BaseModel): + pages_processed: int + r"""Number of pages processed""" + + doc_size_bytes: OptionalNullable[int] = UNSET + r"""Document size in bytes""" + + @model_serializer(mode="wrap") + def serialize_model(self, handler): + optional_fields = ["doc_size_bytes"] + nullable_fields = ["doc_size_bytes"] + null_default_fields = [] + + serialized = handler(self) + + m = {} + + for n, f in type(self).model_fields.items(): + k = f.alias or n + val = serialized.get(k) + serialized.pop(k, None) + + optional_nullable = k in optional_fields and k in nullable_fields + is_set = ( + self.__pydantic_fields_set__.intersection({n}) + or k in null_default_fields + ) # pylint: disable=no-member + + if val is not None and val != UNSET_SENTINEL: + m[k] = val + elif val != UNSET_SENTINEL and ( + not k in optional_fields or (optional_nullable and is_set) + ): + m[k] = val + + return m diff --git a/packages/mistralai_azure/src/mistralai_azure/models/responseformat.py b/packages/mistralai_azure/src/mistralai_azure/models/responseformat.py index cfd58dcf..6d09de5b 100644 --- a/packages/mistralai_azure/src/mistralai_azure/models/responseformat.py +++ b/packages/mistralai_azure/src/mistralai_azure/models/responseformat.py @@ -37,7 +37,7 @@ def serialize_model(self, handler): m = {} - for n, f in self.model_fields.items(): + for n, f in type(self).model_fields.items(): k = 
f.alias or n val = serialized.get(k) serialized.pop(k, None) diff --git a/packages/mistralai_azure/src/mistralai_azure/models/toolmessage.py b/packages/mistralai_azure/src/mistralai_azure/models/toolmessage.py index 3e9aa3da..abca8abe 100644 --- a/packages/mistralai_azure/src/mistralai_azure/models/toolmessage.py +++ b/packages/mistralai_azure/src/mistralai_azure/models/toolmessage.py @@ -51,7 +51,7 @@ def serialize_model(self, handler): m = {} - for n, f in self.model_fields.items(): + for n, f in type(self).model_fields.items(): k = f.alias or n val = serialized.get(k) serialized.pop(k, None) diff --git a/packages/mistralai_azure/src/mistralai_azure/models/usageinfo.py b/packages/mistralai_azure/src/mistralai_azure/models/usageinfo.py index b1d094fc..bbe5cdfa 100644 --- a/packages/mistralai_azure/src/mistralai_azure/models/usageinfo.py +++ b/packages/mistralai_azure/src/mistralai_azure/models/usageinfo.py @@ -1,19 +1,82 @@ """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" from __future__ import annotations -from mistralai_azure.types import BaseModel -from typing_extensions import TypedDict +from mistralai_azure.types import ( + BaseModel, + Nullable, + OptionalNullable, + UNSET, + UNSET_SENTINEL, +) +import pydantic +from pydantic import ConfigDict, model_serializer +from typing import Any, Dict, Optional +from typing_extensions import NotRequired, TypedDict class UsageInfoTypedDict(TypedDict): - prompt_tokens: int - completion_tokens: int - total_tokens: int + prompt_tokens: NotRequired[int] + completion_tokens: NotRequired[int] + total_tokens: NotRequired[int] + prompt_audio_seconds: NotRequired[Nullable[int]] class UsageInfo(BaseModel): - prompt_tokens: int + model_config = ConfigDict( + populate_by_name=True, arbitrary_types_allowed=True, extra="allow" + ) + __pydantic_extra__: Dict[str, Any] = pydantic.Field(init=False) - completion_tokens: int + prompt_tokens: Optional[int] = 0 - total_tokens: int + completion_tokens: Optional[int] = 
0 + + total_tokens: Optional[int] = 0 + + prompt_audio_seconds: OptionalNullable[int] = UNSET + + @property + def additional_properties(self): + return self.__pydantic_extra__ + + @additional_properties.setter + def additional_properties(self, value): + self.__pydantic_extra__ = value # pyright: ignore[reportIncompatibleVariableOverride] + + @model_serializer(mode="wrap") + def serialize_model(self, handler): + optional_fields = [ + "prompt_tokens", + "completion_tokens", + "total_tokens", + "prompt_audio_seconds", + ] + nullable_fields = ["prompt_audio_seconds"] + null_default_fields = [] + + serialized = handler(self) + + m = {} + + for n, f in type(self).model_fields.items(): + k = f.alias or n + val = serialized.get(k) + serialized.pop(k, None) + + optional_nullable = k in optional_fields and k in nullable_fields + is_set = ( + self.__pydantic_fields_set__.intersection({n}) + or k in null_default_fields + ) # pylint: disable=no-member + + if val is not None and val != UNSET_SENTINEL: + m[k] = val + elif val != UNSET_SENTINEL and ( + not k in optional_fields or (optional_nullable and is_set) + ): + m[k] = val + + for k, v in serialized.items(): + m[k] = v + + return m diff --git a/packages/mistralai_azure/src/mistralai_azure/models/usermessage.py b/packages/mistralai_azure/src/mistralai_azure/models/usermessage.py index 8cce1745..05976fc0 100644 --- a/packages/mistralai_azure/src/mistralai_azure/models/usermessage.py +++ b/packages/mistralai_azure/src/mistralai_azure/models/usermessage.py @@ -39,7 +39,7 @@ def serialize_model(self, handler): m = {} - for n, f in self.model_fields.items(): + for n, f in type(self).model_fields.items(): k = f.alias or n val = serialized.get(k) serialized.pop(k, None) diff --git a/packages/mistralai_azure/src/mistralai_azure/ocr.py b/packages/mistralai_azure/src/mistralai_azure/ocr.py new file mode 100644 index 00000000..71fe0337 --- /dev/null +++ b/packages/mistralai_azure/src/mistralai_azure/ocr.py @@ -0,0 +1,271 @@ +"""Code 
generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from .basesdk import BaseSDK +from mistralai_azure import models, utils +from mistralai_azure._hooks import HookContext +from mistralai_azure.types import Nullable, OptionalNullable, UNSET +from typing import Any, List, Mapping, Optional, Union + + +class Ocr(BaseSDK): + def process( + self, + *, + model: Nullable[str], + document: Union[models.Document, models.DocumentTypedDict], + id: Optional[str] = None, + pages: OptionalNullable[List[int]] = UNSET, + include_image_base64: OptionalNullable[bool] = UNSET, + image_limit: OptionalNullable[int] = UNSET, + image_min_size: OptionalNullable[int] = UNSET, + bbox_annotation_format: OptionalNullable[ + Union[models.ResponseFormat, models.ResponseFormatTypedDict] + ] = UNSET, + document_annotation_format: OptionalNullable[ + Union[models.ResponseFormat, models.ResponseFormatTypedDict] + ] = UNSET, + retries: OptionalNullable[utils.RetryConfig] = UNSET, + server_url: Optional[str] = None, + timeout_ms: Optional[int] = None, + http_headers: Optional[Mapping[str, str]] = None, + ) -> Optional[models.OCRResponse]: + r"""OCR + + :param model: + :param document: Document to run OCR on + :param id: + :param pages: Specific pages user wants to process in various formats: single number, range, or list of both. Starts from 0 + :param include_image_base64: Include image URLs in response + :param image_limit: Max images to extract + :param image_min_size: Minimum height and width of image to extract + :param bbox_annotation_format: Structured output class for extracting useful information from each extracted bounding box / image from document. Only json_schema is valid for this field + :param document_annotation_format: Structured output class for extracting useful information from the entire document. 
Only json_schema is valid for this field + :param retries: Override the default retry configuration for this method + :param server_url: Override the default server URL for this method + :param timeout_ms: Override the default request timeout configuration for this method in milliseconds + :param http_headers: Additional headers to set or replace on requests. + """ + base_url = None + url_variables = None + if timeout_ms is None: + timeout_ms = self.sdk_configuration.timeout_ms + + if server_url is not None: + base_url = server_url + else: + base_url = self._get_url(base_url, url_variables) + + request = models.OCRRequest( + model=model, + id=id, + document=utils.get_pydantic_model(document, models.Document), + pages=pages, + include_image_base64=include_image_base64, + image_limit=image_limit, + image_min_size=image_min_size, + bbox_annotation_format=utils.get_pydantic_model( + bbox_annotation_format, OptionalNullable[models.ResponseFormat] + ), + document_annotation_format=utils.get_pydantic_model( + document_annotation_format, OptionalNullable[models.ResponseFormat] + ), + ) + + req = self._build_request( + method="POST", + path="/ocr", + base_url=base_url, + url_variables=url_variables, + request=request, + request_body_required=True, + request_has_path_params=False, + request_has_query_params=True, + user_agent_header="user-agent", + accept_header_value="application/json", + http_headers=http_headers, + security=self.sdk_configuration.security, + get_serialized_body=lambda: utils.serialize_request_body( + request, False, False, "json", models.OCRRequest + ), + timeout_ms=timeout_ms, + ) + + if retries == UNSET: + if self.sdk_configuration.retry_config is not UNSET: + retries = self.sdk_configuration.retry_config + + retry_config = None + if isinstance(retries, utils.RetryConfig): + retry_config = (retries, ["429", "500", "502", "503", "504"]) + + http_res = self.do_request( + hook_ctx=HookContext( + config=self.sdk_configuration, + base_url=base_url or "", + 
operation_id="ocr_v1_ocr_post", + oauth2_scopes=[], + security_source=self.sdk_configuration.security, + ), + request=req, + error_status_codes=["422", "4XX", "5XX"], + retry_config=retry_config, + ) + + response_data: Any = None + if utils.match_response(http_res, "200", "application/json"): + return utils.unmarshal_json(http_res.text, Optional[models.OCRResponse]) + if utils.match_response(http_res, "422", "application/json"): + response_data = utils.unmarshal_json( + http_res.text, models.HTTPValidationErrorData + ) + raise models.HTTPValidationError(data=response_data) + if utils.match_response(http_res, "4XX", "*"): + http_res_text = utils.stream_to_text(http_res) + raise models.SDKError( + "API error occurred", http_res.status_code, http_res_text, http_res + ) + if utils.match_response(http_res, "5XX", "*"): + http_res_text = utils.stream_to_text(http_res) + raise models.SDKError( + "API error occurred", http_res.status_code, http_res_text, http_res + ) + + content_type = http_res.headers.get("Content-Type") + http_res_text = utils.stream_to_text(http_res) + raise models.SDKError( + f"Unexpected response received (code: {http_res.status_code}, type: {content_type})", + http_res.status_code, + http_res_text, + http_res, + ) + + async def process_async( + self, + *, + model: Nullable[str], + document: Union[models.Document, models.DocumentTypedDict], + id: Optional[str] = None, + pages: OptionalNullable[List[int]] = UNSET, + include_image_base64: OptionalNullable[bool] = UNSET, + image_limit: OptionalNullable[int] = UNSET, + image_min_size: OptionalNullable[int] = UNSET, + bbox_annotation_format: OptionalNullable[ + Union[models.ResponseFormat, models.ResponseFormatTypedDict] + ] = UNSET, + document_annotation_format: OptionalNullable[ + Union[models.ResponseFormat, models.ResponseFormatTypedDict] + ] = UNSET, + retries: OptionalNullable[utils.RetryConfig] = UNSET, + server_url: Optional[str] = None, + timeout_ms: Optional[int] = None, + http_headers: 
Optional[Mapping[str, str]] = None, + ) -> Optional[models.OCRResponse]: + r"""OCR + + :param model: + :param document: Document to run OCR on + :param id: + :param pages: Specific pages user wants to process in various formats: single number, range, or list of both. Starts from 0 + :param include_image_base64: Include image URLs in response + :param image_limit: Max images to extract + :param image_min_size: Minimum height and width of image to extract + :param bbox_annotation_format: Structured output class for extracting useful information from each extracted bounding box / image from document. Only json_schema is valid for this field + :param document_annotation_format: Structured output class for extracting useful information from the entire document. Only json_schema is valid for this field + :param retries: Override the default retry configuration for this method + :param server_url: Override the default server URL for this method + :param timeout_ms: Override the default request timeout configuration for this method in milliseconds + :param http_headers: Additional headers to set or replace on requests. 
+ """ + base_url = None + url_variables = None + if timeout_ms is None: + timeout_ms = self.sdk_configuration.timeout_ms + + if server_url is not None: + base_url = server_url + else: + base_url = self._get_url(base_url, url_variables) + + request = models.OCRRequest( + model=model, + id=id, + document=utils.get_pydantic_model(document, models.Document), + pages=pages, + include_image_base64=include_image_base64, + image_limit=image_limit, + image_min_size=image_min_size, + bbox_annotation_format=utils.get_pydantic_model( + bbox_annotation_format, OptionalNullable[models.ResponseFormat] + ), + document_annotation_format=utils.get_pydantic_model( + document_annotation_format, OptionalNullable[models.ResponseFormat] + ), + ) + + req = self._build_request_async( + method="POST", + path="/ocr", + base_url=base_url, + url_variables=url_variables, + request=request, + request_body_required=True, + request_has_path_params=False, + request_has_query_params=True, + user_agent_header="user-agent", + accept_header_value="application/json", + http_headers=http_headers, + security=self.sdk_configuration.security, + get_serialized_body=lambda: utils.serialize_request_body( + request, False, False, "json", models.OCRRequest + ), + timeout_ms=timeout_ms, + ) + + if retries == UNSET: + if self.sdk_configuration.retry_config is not UNSET: + retries = self.sdk_configuration.retry_config + + retry_config = None + if isinstance(retries, utils.RetryConfig): + retry_config = (retries, ["429", "500", "502", "503", "504"]) + + http_res = await self.do_request_async( + hook_ctx=HookContext( + config=self.sdk_configuration, + base_url=base_url or "", + operation_id="ocr_v1_ocr_post", + oauth2_scopes=[], + security_source=self.sdk_configuration.security, + ), + request=req, + error_status_codes=["422", "4XX", "5XX"], + retry_config=retry_config, + ) + + response_data: Any = None + if utils.match_response(http_res, "200", "application/json"): + return utils.unmarshal_json(http_res.text, 
Optional[models.OCRResponse]) + if utils.match_response(http_res, "422", "application/json"): + response_data = utils.unmarshal_json( + http_res.text, models.HTTPValidationErrorData + ) + raise models.HTTPValidationError(data=response_data) + if utils.match_response(http_res, "4XX", "*"): + http_res_text = await utils.stream_to_text_async(http_res) + raise models.SDKError( + "API error occurred", http_res.status_code, http_res_text, http_res + ) + if utils.match_response(http_res, "5XX", "*"): + http_res_text = await utils.stream_to_text_async(http_res) + raise models.SDKError( + "API error occurred", http_res.status_code, http_res_text, http_res + ) + + content_type = http_res.headers.get("Content-Type") + http_res_text = await utils.stream_to_text_async(http_res) + raise models.SDKError( + f"Unexpected response received (code: {http_res.status_code}, type: {content_type})", + http_res.status_code, + http_res_text, + http_res, + ) diff --git a/packages/mistralai_azure/src/mistralai_azure/sdkconfiguration.py b/packages/mistralai_azure/src/mistralai_azure/sdkconfiguration.py index 605e5d74..51289cf0 100644 --- a/packages/mistralai_azure/src/mistralai_azure/sdkconfiguration.py +++ b/packages/mistralai_azure/src/mistralai_azure/sdkconfiguration.py @@ -1,6 +1,5 @@ """Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" -from ._hooks import SDKHooks from ._version import ( __gen_version__, __openapi_doc_version__, @@ -42,9 +41,6 @@ class SDKConfiguration: retry_config: OptionalNullable[RetryConfig] = Field(default_factory=lambda: UNSET) timeout_ms: Optional[int] = None - def __post_init__(self): - self._hooks = SDKHooks() - def get_server_details(self) -> Tuple[str, Dict[str, str]]: if self.server_url is not None and self.server_url: return remove_suffix(self.server_url, "/"), {} @@ -55,6 +51,3 @@ def get_server_details(self) -> Tuple[str, Dict[str, str]]: raise ValueError(f'Invalid server "{self.server}"') return SERVERS[self.server], {} - - def get_hooks(self) -> SDKHooks: - return self._hooks diff --git a/packages/mistralai_azure/src/mistralai_azure/types/basemodel.py b/packages/mistralai_azure/src/mistralai_azure/types/basemodel.py index a6187efa..231c2e37 100644 --- a/packages/mistralai_azure/src/mistralai_azure/types/basemodel.py +++ b/packages/mistralai_azure/src/mistralai_azure/types/basemodel.py @@ -2,7 +2,7 @@ from pydantic import ConfigDict, model_serializer from pydantic import BaseModel as PydanticBaseModel -from typing import TYPE_CHECKING, Literal, Optional, TypeVar, Union, NewType +from typing import TYPE_CHECKING, Literal, Optional, TypeVar, Union from typing_extensions import TypeAliasType, TypeAlias @@ -35,5 +35,5 @@ def __bool__(self) -> Literal[False]: "OptionalNullable", Union[Optional[Nullable[T]], Unset], type_params=(T,) ) -UnrecognizedInt = NewType("UnrecognizedInt", int) -UnrecognizedStr = NewType("UnrecognizedStr", str) +UnrecognizedInt: TypeAlias = int +UnrecognizedStr: TypeAlias = str diff --git a/packages/mistralai_azure/src/mistralai_azure/utils/__init__.py b/packages/mistralai_azure/src/mistralai_azure/utils/__init__.py index 3cded8fe..dd4aa4b3 100644 --- a/packages/mistralai_azure/src/mistralai_azure/utils/__init__.py +++ b/packages/mistralai_azure/src/mistralai_azure/utils/__init__.py @@ -1,50 +1,55 @@ """Code generated by 
Speakeasy (https://speakeasy.com). DO NOT EDIT.""" -from .annotations import get_discriminator -from .enums import OpenEnumMeta -from .headers import get_headers, get_response_headers -from .metadata import ( - FieldMetadata, - find_metadata, - FormMetadata, - HeaderMetadata, - MultipartFormMetadata, - PathParamMetadata, - QueryParamMetadata, - RequestMetadata, - SecurityMetadata, -) -from .queryparams import get_query_params -from .retries import BackoffStrategy, Retries, retry, retry_async, RetryConfig -from .requestbodies import serialize_request_body, SerializedRequestBody -from .security import get_security -from .serializers import ( - get_pydantic_model, - marshal_json, - unmarshal, - unmarshal_json, - serialize_decimal, - serialize_float, - serialize_int, - stream_to_text, - stream_to_text_async, - stream_to_bytes, - stream_to_bytes_async, - validate_const, - validate_decimal, - validate_float, - validate_int, - validate_open_enum, -) -from .url import generate_url, template_url, remove_suffix -from .values import ( - get_global_from_env, - match_content_type, - match_status_codes, - match_response, - cast_partial, -) -from .logger import Logger, get_body_content, get_default_logger +from typing import TYPE_CHECKING +from importlib import import_module + +if TYPE_CHECKING: + from .annotations import get_discriminator + from .datetimes import parse_datetime + from .enums import OpenEnumMeta + from .headers import get_headers, get_response_headers + from .metadata import ( + FieldMetadata, + find_metadata, + FormMetadata, + HeaderMetadata, + MultipartFormMetadata, + PathParamMetadata, + QueryParamMetadata, + RequestMetadata, + SecurityMetadata, + ) + from .queryparams import get_query_params + from .retries import BackoffStrategy, Retries, retry, retry_async, RetryConfig + from .requestbodies import serialize_request_body, SerializedRequestBody + from .security import get_security + from .serializers import ( + get_pydantic_model, + marshal_json, + unmarshal, 
# Maps each lazily-exported name to the submodule (relative to this package)
# that actually defines it. `__getattr__` below consults this table on first
# attribute access, so importing this package stays cheap (PEP 562).
_dynamic_imports: dict[str, str] = {
    "BackoffStrategy": ".retries",
    "FieldMetadata": ".metadata",
    "find_metadata": ".metadata",
    "FormMetadata": ".metadata",
    "generate_url": ".url",
    "get_body_content": ".logger",
    "get_default_logger": ".logger",
    "get_discriminator": ".annotations",
    "parse_datetime": ".datetimes",
    "get_global_from_env": ".values",
    "get_headers": ".headers",
    "get_pydantic_model": ".serializers",
    "get_query_params": ".queryparams",
    "get_response_headers": ".headers",
    "get_security": ".security",
    "HeaderMetadata": ".metadata",
    "Logger": ".logger",
    "marshal_json": ".serializers",
    "match_content_type": ".values",
    "match_status_codes": ".values",
    "match_response": ".values",
    "MultipartFormMetadata": ".metadata",
    "OpenEnumMeta": ".enums",
    "PathParamMetadata": ".metadata",
    "QueryParamMetadata": ".metadata",
    "remove_suffix": ".url",
    "Retries": ".retries",
    "retry": ".retries",
    "retry_async": ".retries",
    "RetryConfig": ".retries",
    "RequestMetadata": ".metadata",
    "SecurityMetadata": ".metadata",
    "serialize_decimal": ".serializers",
    "serialize_float": ".serializers",
    "serialize_int": ".serializers",
    "serialize_request_body": ".requestbodies",
    "SerializedRequestBody": ".requestbodies",
    "stream_to_text": ".serializers",
    "stream_to_text_async": ".serializers",
    "stream_to_bytes": ".serializers",
    "stream_to_bytes_async": ".serializers",
    "template_url": ".url",
    "unmarshal": ".serializers",
    "unmarshal_json": ".serializers",
    "validate_decimal": ".serializers",
    "validate_const": ".serializers",
    "validate_float": ".serializers",
    "validate_int": ".serializers",
    "validate_open_enum": ".serializers",
    "cast_partial": ".values",
}


def __getattr__(attr_name: str) -> object:
    """Lazily resolve *attr_name* from its defining submodule (PEP 562).

    Called by the interpreter only when normal module attribute lookup
    fails, so already-imported names never pay this cost.

    :raises AttributeError: if the name is not a known lazy export, or the
        submodule does not actually define it.
    :raises ImportError: if the submodule itself fails to import.
    """
    module_name = _dynamic_imports.get(attr_name)
    if module_name is None:
        # Use the conventional module-AttributeError wording so hasattr(),
        # dir()-based tooling and error messages behave as users expect.
        raise AttributeError(
            f"module {__name__!r} has no attribute {attr_name!r}"
        )

    try:
        # Relative import anchored at this package.
        module = import_module(module_name, __package__)
        return getattr(module, attr_name)
    except ImportError as e:
        raise ImportError(
            f"Failed to import {attr_name} from {module_name}: {e}"
        ) from e
    except AttributeError as e:
        raise AttributeError(
            f"Failed to get {attr_name} from {module_name}: {e}"
        ) from e


def __dir__():
    """Advertise the lazily importable names for dir() and autocompletion."""
    lazy_attrs = list(_dynamic_imports.keys())
    return sorted(lazy_attrs)
+ """ + # Python 3.11 and later can parse RFC 3339 directly + if sys.version_info >= (3, 11): + return datetime.fromisoformat(datetime_string) + + # For Python 3.10 and earlier, a common ValueError is trailing 'Z' suffix, + # so fix that upfront. + if datetime_string.endswith("Z"): + datetime_string = datetime_string[:-1] + "+00:00" + + return datetime.fromisoformat(datetime_string) diff --git a/packages/mistralai_azure/src/mistralai_azure/utils/enums.py b/packages/mistralai_azure/src/mistralai_azure/utils/enums.py index c650b10c..c3bc13cf 100644 --- a/packages/mistralai_azure/src/mistralai_azure/utils/enums.py +++ b/packages/mistralai_azure/src/mistralai_azure/utils/enums.py @@ -1,34 +1,74 @@ """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" import enum - +import sys class OpenEnumMeta(enum.EnumMeta): - def __call__( - cls, value, names=None, *, module=None, qualname=None, type=None, start=1 - ): - # The `type` kwarg also happens to be a built-in that pylint flags as - # redeclared. Safe to ignore this lint rule with this scope. - # pylint: disable=redefined-builtin + # The __call__ method `boundary` kwarg was added in 3.11 and must be present + # for pyright. Refer also: https://github.com/pylint-dev/pylint/issues/9622 + # pylint: disable=unexpected-keyword-arg + # The __call__ method `values` varg must be named for pyright. + # pylint: disable=keyword-arg-before-vararg + + if sys.version_info >= (3, 11): + def __call__( + cls, value, names=None, *values, module=None, qualname=None, type=None, start=1, boundary=None + ): + # The `type` kwarg also happens to be a built-in that pylint flags as + # redeclared. Safe to ignore this lint rule with this scope. 
class OpenEnumMeta(enum.EnumMeta):
    """Metaclass for "open" enums: looking up a value that is not a declared
    member returns the raw value unchanged instead of raising ValueError.

    Two `__call__` definitions are selected at class-creation time based on
    the running interpreter, because the EnumMeta.__call__ signature changed
    in Python 3.11 (added the positional `*values` varargs and the
    `boundary` kwarg).
    """

    # The __call__ method `boundary` kwarg was added in 3.11 and must be present
    # for pyright. Refer also: https://github.com/pylint-dev/pylint/issues/9622
    # pylint: disable=unexpected-keyword-arg
    # The __call__ method `values` varg must be named for pyright.
    # pylint: disable=keyword-arg-before-vararg

    if sys.version_info >= (3, 11):
        def __call__(
            cls, value, names=None, *values, module=None, qualname=None, type=None, start=1, boundary=None
        ):
            # The `type` kwarg also happens to be a built-in that pylint flags as
            # redeclared. Safe to ignore this lint rule with this scope.
            # pylint: disable=redefined-builtin

            # `names is not None` means this call is *creating* a new enum
            # class (functional API), not looking up a member — delegate
            # unconditionally, errors and all.
            if names is not None:
                return super().__call__(
                    value,
                    names=names,
                    *values,
                    module=module,
                    qualname=qualname,
                    type=type,
                    start=start,
                    boundary=boundary,
                )

            # Member lookup: fall back to the raw value on a miss, which is
            # what makes the enum "open".
            try:
                return super().__call__(
                    value,
                    names=names,  # pyright: ignore[reportArgumentType]
                    *values,
                    module=module,
                    qualname=qualname,
                    type=type,
                    start=start,
                    boundary=boundary,
                )
            except ValueError:
                return value
    else:
        def __call__(
            cls, value, names=None, *, module=None, qualname=None, type=None, start=1
        ):
            # The `type` kwarg also happens to be a built-in that pylint flags as
            # redeclared. Safe to ignore this lint rule with this scope.
            # pylint: disable=redefined-builtin

            # Functional-API class creation: delegate unconditionally.
            if names is not None:
                return super().__call__(
                    value,
                    names=names,
                    module=module,
                    qualname=qualname,
                    type=type,
                    start=start,
                )

            # Member lookup: unknown values pass through instead of raising.
            try:
                return super().__call__(
                    value,
                    names=names,  # pyright: ignore[reportArgumentType]
                    module=module,
                    qualname=qualname,
                    type=type,
                    start=start,
                )
            except ValueError:
                return value
file_name = "" + content = None + content_type = None + + for file_field_name in file_fields: + file_field = file_fields[file_field_name] + + file_metadata = find_field_metadata(file_field, MultipartFormMetadata) + if file_metadata is None: + continue + + if file_metadata.content: + content = getattr(file_obj, file_field_name, None) + elif file_field_name == "content_type": + content_type = getattr(file_obj, file_field_name, None) + else: + file_name = getattr(file_obj, file_field_name) + + if file_name == "" or content is None: + raise ValueError("invalid multipart/form-data file") + + return file_name, content, content_type + + def serialize_multipart_form( media_type: str, request: Any -) -> Tuple[str, Dict[str, Any], Dict[str, Any]]: +) -> Tuple[str, Dict[str, Any], List[Tuple[str, Any]]]: form: Dict[str, Any] = {} - files: Dict[str, Any] = {} + files: List[Tuple[str, Any]] = [] if not isinstance(request, BaseModel): raise TypeError("invalid request body type") @@ -112,39 +140,32 @@ def serialize_multipart_form( f_name = field.alias if field.alias else name if field_metadata.file: - file_fields: Dict[str, FieldInfo] = val.__class__.model_fields - - file_name = "" - content = None - content_type = None - - for file_field_name in file_fields: - file_field = file_fields[file_field_name] + if isinstance(val, List): + # Handle array of files + for file_obj in val: + if not _is_set(file_obj): + continue + + file_name, content, content_type = _extract_file_properties(file_obj) - file_metadata = find_field_metadata(file_field, MultipartFormMetadata) - if file_metadata is None: - continue + if content_type is not None: + files.append((f_name + "[]", (file_name, content, content_type))) + else: + files.append((f_name + "[]", (file_name, content))) + else: + # Handle single file + file_name, content, content_type = _extract_file_properties(val) - if file_metadata.content: - content = getattr(val, file_field_name, None) - elif file_field_name == "content_type": - 
def is_union(obj: object) -> bool:
    """
    Returns True if the given object is a typing.Union or typing_extensions.Union.
    """
    # Identity comparison on purpose: we want the Union special form itself,
    # not anything merely equal to it.
    for union_obj in _get_typing_objects_by_name_of("Union"):
        if obj is union_obj:
            return True
    return False


@functools.cache
def _get_typing_objects_by_name_of(name: str) -> Tuple[Any, ...]:
    """
    Get typing objects by name from typing and typing_extensions.
    Reference: https://typing-extensions.readthedocs.io/en/latest/#runtime-use-of-types
    """
    found = tuple(
        getattr(module, name)
        for module in (typing, typing_extensions)
        if hasattr(module, name)
    )
    if not found:
        raise ValueError(
            f"Neither typing nor typing_extensions has an object called {name!r}"
        )
    return found