diff --git a/.speakeasy/gen.lock b/.speakeasy/gen.lock index 6e86c59c..678c20f2 100644 --- a/.speakeasy/gen.lock +++ b/.speakeasy/gen.lock @@ -1,26 +1,26 @@ lockVersion: 2.0.0 id: 2d045ec7-2ebb-4f4d-ad25-40953b132161 management: - docChecksum: 2d9e4f612e5caf84349ab02663eee66e + docChecksum: b66b034aac7aa9b38c4fb47a3b3d843e docVersion: 1.0.0 - speakeasyVersion: 1.685.0 - generationVersion: 2.794.1 - releaseVersion: 2.0.0a3 - configChecksum: 7fc1ba01c21def8447b979e71593af4a + speakeasyVersion: 1.729.0 + generationVersion: 2.841.0 + releaseVersion: 2.0.0-a3.1 + configChecksum: 134292298710eaf25a0f90f7097e648f repoURL: https://github.com/mistralai/client-python.git installationURL: https://github.com/mistralai/client-python.git published: true persistentEdits: - generation_id: 3aa9018f-cb6c-4c1b-96d0-b832fd5f6513 - pristine_commit_hash: 5c4e3b65b7572c91338d50dc3ca91ea6a46eedf7 - pristine_tree_hash: aaea604044e12872107c3b550ea7be094fb66a99 + generation_id: 21ec746f-e476-468a-bb8e-c942c0997501 + pristine_commit_hash: 99ae95385eb06175841ba19bef78319a5921c585 + pristine_tree_hash: 5b06b6f5add0cd16af8139d524a42368532441c6 features: python: additionalDependencies: 1.0.0 additionalProperties: 1.0.1 configurableModuleName: 0.2.0 - constsAndDefaults: 1.0.5 - core: 5.23.18 + constsAndDefaults: 1.0.7 + core: 6.0.12 customCodeRegions: 0.1.1 defaultEnabledRetries: 0.2.0 downloadStreams: 1.0.1 @@ -29,23 +29,23 @@ features: examples: 3.0.2 flatRequests: 1.0.1 flattening: 3.1.1 - globalSecurity: 3.0.4 + globalSecurity: 3.0.5 globalSecurityCallbacks: 1.0.0 globalSecurityFlattening: 1.0.0 globalServerURLs: 3.2.0 includes: 3.0.0 methodArguments: 1.0.2 multipartFileContentType: 1.0.0 - nameOverrides: 3.0.1 - nullables: 1.0.1 - openEnums: 1.0.2 - responseFormat: 1.0.1 - retries: 3.0.3 - sdkHooks: 1.2.0 - serverEvents: 1.0.11 + nameOverrides: 3.0.3 + nullables: 1.0.2 + openEnums: 1.0.4 + responseFormat: 1.1.0 + retries: 3.0.4 + sdkHooks: 1.2.1 + serverEvents: 1.0.13 serverEventsSentinels: 
0.1.0 serverIDs: 3.0.0 - unions: 3.1.1 + unions: 3.1.4 uploadStreams: 1.0.0 trackedFiles: .gitattributes: @@ -58,64 +58,128 @@ trackedFiles: pristine_git_object: 8d79f0abb72526f1fb34a4c03e5bba612c6ba2ae USAGE.md: id: 3aed33ce6e6f - last_write_checksum: sha1:b1cf4cc87111df10c55731b3f5abad22890387a2 - pristine_git_object: 1810386448a440cfc5f7b8579695b228ae40460d + last_write_checksum: sha1:50cc0351d6145a805d1d5ae8be4dfce58178e648 + pristine_git_object: f71bbabc223b8cef8d923816fce8d572f3901884 + docs/errors/httpvalidationerror.md: + id: 7fe2e5327e07 + last_write_checksum: sha1:277a46811144643262651853dc6176d21b33573e + pristine_git_object: 712a148c3e2305dca4c702851865f9f8c8e674cc docs/models/agent.md: id: ffdbb4c53c87 - last_write_checksum: sha1:4538aaa78a09b7e33db405f84916b1eb82f94bca - pristine_git_object: e335d889cdb70f4d3c987827ff714db90418cb39 + last_write_checksum: sha1:c87b05a17785cd83fdfc58cb2d55b6d77d3bc23e + pristine_git_object: 4de5a901d120b85ba5940490a2ec3fd4f1a91136 docs/models/agentaliasresponse.md: id: 5ac4721d8947 last_write_checksum: sha1:15dcc6820e89d2c6bb799e331463419ce29ec167 pristine_git_object: aa531ec5d1464f95e3938f148c1e88efc30fa6a6 docs/models/agentconversation.md: id: 3590c1a566fa - last_write_checksum: sha1:264d78815c3999bac377ab3f8c08a264178baf43 - pristine_git_object: a2d617316f1965acfabf7d2fe74334de16213829 + last_write_checksum: sha1:43e7c1ed2b43aca2794d89f2e6d6aa5f1478cc3e + pristine_git_object: 451f6fb8f700dddd54c69593c316bf562b5cbc93 docs/models/agentconversationagentversion.md: id: 468e0d1614bb last_write_checksum: sha1:6e60bf4a18d791d694e90c89bdb8cc38e43c324b pristine_git_object: 668a8dc0f0c51a231a73aed51b2db13de243a038 - docs/models/agentconversationobject.md: - id: cfd35d9dd4f2 - last_write_checksum: sha1:112552d4a241967cf0a7dcb981428e7e0715dc34 - pristine_git_object: ea7cc75c5197ed42f9fb508a969baa16effe1f98 - docs/models/agentcreationrequest.md: - id: 697a770fe5c0 - last_write_checksum: 
sha1:b3f12ca0a6356e657de2941c8441fc951bcc96f4 - pristine_git_object: f0f0fdbc13f8f490ded4f8df3944250aece1311b - docs/models/agentcreationrequesttool.md: - id: 392d970ffb74 - last_write_checksum: sha1:310d4b107554a9c16143191fdc306a5438b63768 - pristine_git_object: b3bd7fa3cead0a0a1480b0e1b3f0afbfd177b600 docs/models/agenthandoffdoneevent.md: id: dcf166a3c3b0 last_write_checksum: sha1:9e95c09f724827f5e9c202fd634bdfa2baef1b6e pristine_git_object: 6bfcc3d83457edf05d0f13957d34ead0f260599b docs/models/agenthandoffentry.md: id: 39d54f489b84 - last_write_checksum: sha1:7d949e750fd24dea20cabae340f9204d8f756008 - pristine_git_object: 8831b0ebad1c4e857f4f4353d1815753bb13125f - docs/models/agenthandoffentryobject.md: - id: ac62dd5f1002 - last_write_checksum: sha1:9d25ec388406e6faa765cf163e1e6dcb590ca0e9 - pristine_git_object: 4bb876fb3c60a42cf530c932b7c60278e6036f03 - docs/models/agenthandoffentrytype.md: - id: 07506fd159e0 - last_write_checksum: sha1:27ce9bdf225fbad46230e339a5c6d96213f1df62 - pristine_git_object: 527ebceb2ff1bbba1067f30438befd5e2c2e91d6 + last_write_checksum: sha1:a93a604ced2303eb6f93cfe0f1360224d3298b37 + pristine_git_object: 2b689ec720c02b7289ec462d7acca64a82b23570 docs/models/agenthandoffstartedevent.md: id: b620102af460 last_write_checksum: sha1:33732e0465423348c2ace458506a597a3dadf9b2 pristine_git_object: 518b5a0c4521ec55a5a28ba3ef0ad1c1fce52792 - docs/models/agentobject.md: - id: ed24a6d647a0 - last_write_checksum: sha1:ff5dfde6cc19f09c83afb5b4f0f103096df6691d - pristine_git_object: 70e143b030d3041c7538ecdacb8f5f9f8d1b5c92 + docs/models/agentsapiv1agentscreateorupdatealiasrequest.md: + id: c09ec9946094 + last_write_checksum: sha1:0883217b4bad21f5d4f8162ca72005bf9105a93f + pristine_git_object: 79406434cc6ff3d1485089f35639d6284f66d6cb + docs/models/agentsapiv1agentsdeletealiasrequest.md: + id: 429307ab315d + last_write_checksum: sha1:8e0a8388bb51c234aa1eb5566cb68389ebe57574 + pristine_git_object: 8e95c0c31e8ac92b374c153d622d7806b9e59a8d + 
docs/models/agentsapiv1agentsdeleterequest.md: + id: 0faaaa59add9 + last_write_checksum: sha1:2a34269e682bb910b83814b4d730ba2ce07f8cb2 + pristine_git_object: 2799f41817ab0f7a22b49b4ff895c8308525953c + docs/models/agentsapiv1agentsgetagentversion.md: + id: 3316961b40c4 + last_write_checksum: sha1:e4f4c6a64b1c2ec9465b7ad008df4d7859098e59 + pristine_git_object: 7fb9f2d578c4901ca1b41aaada6acc3a5ee94fa1 + docs/models/agentsapiv1agentsgetrequest.md: + id: 01740ae62cff + last_write_checksum: sha1:bc86e90289ec09b40212083a82455b4fe71c7194 + pristine_git_object: ceffe0096ffd6db97a6018d34870c29cec4fb0d3 + docs/models/agentsapiv1agentsgetversionrequest.md: + id: 88ed22b85cde + last_write_checksum: sha1:0ef23807c8efa2662144da66745045abdd2cb60a + pristine_git_object: 96a7358943a69e871a2bb7f0f30d6fe2bb8dff3d + docs/models/agentsapiv1agentslistrequest.md: + id: c2720c209527 + last_write_checksum: sha1:99502da34d868f1563ad1e3ea256f3becdbefa11 + pristine_git_object: 4785a54c561f5f9e1eb7ffd3317c5faa9b8b56dd + docs/models/agentsapiv1agentslistversionaliasesrequest.md: + id: 69c8bce2c017 + last_write_checksum: sha1:4083fc80627b2cc04fd271df21393944730ef1ba + pristine_git_object: 3083bf92641404738948cd57306eac978b701551 + docs/models/agentsapiv1agentslistversionsrequest.md: + id: 0bc44ed8d6bb + last_write_checksum: sha1:315790552fc5b2b3a6c4f7be2eb33100133abe18 + pristine_git_object: 91831700bed92cb4f609f8c412dcb0ee98b544ca + docs/models/agentsapiv1agentsupdaterequest.md: + id: 7692812cd677 + last_write_checksum: sha1:aaccaa13eeb0d775b0c6a0b23c328d3f3c2c2dbf + pristine_git_object: 7ef60becfcdde09c8ce0366361306c5661d67e24 + docs/models/agentsapiv1agentsupdateversionrequest.md: + id: a001251b1624 + last_write_checksum: sha1:0ee9e0fc55fd969f2b8f2c55dec93bf10e0e5b2f + pristine_git_object: e937acc9b1d3f50eee69495b1305f7aee1c960ac + docs/models/agentsapiv1conversationsappendrequest.md: + id: 70f76380e810 + last_write_checksum: sha1:d428dc114b60362d269b5ae50a57ea60b9edee1a + pristine_git_object: 
ac8a00ecab30305de8eb8c7c08cda1b1c04148c3 + docs/models/agentsapiv1conversationsappendstreamrequest.md: + id: f6ada9a592c5 + last_write_checksum: sha1:8a806ca2e5bad75d9d0cf50726dc0d5b8e7e3eab + pristine_git_object: dbc330f11aa3039c9cea2dd7d477d56d5c4969d0 + docs/models/agentsapiv1conversationsdeleterequest.md: + id: c2c9f084ed93 + last_write_checksum: sha1:9ecca93f8123cebdd1f9e74cf0f4a104b46402a8 + pristine_git_object: c6eed281331cb4d2cac4470de5e04935d22eca5a + docs/models/agentsapiv1conversationsgetrequest.md: + id: d6acce23f92c + last_write_checksum: sha1:b5d5529b72c16293d3d9b5c45dcb2e3798405bcf + pristine_git_object: 67d450c88778cb27d7d0ba06d49d9f419840b32e + docs/models/agentsapiv1conversationshistoryrequest.md: + id: e3efc36ea8b5 + last_write_checksum: sha1:4155100eaed6d3b7410b3f4476f000d1879576be + pristine_git_object: 7e5d39e9a11ac437a24b8c059db56527fa93f8b0 + docs/models/agentsapiv1conversationslistrequest.md: + id: 406c3e92777a + last_write_checksum: sha1:d5c5effcf2ca32900678d20b667bdaf8ca908194 + pristine_git_object: 62c9011faf26b3a4268186f01caf98c186e7d5b4 + docs/models/agentsapiv1conversationslistresponse.md: + id: 394c37d2203f + last_write_checksum: sha1:1144f41f8a97daacfb75c11fdf3575e553cf0859 + pristine_git_object: b233ee203ff5da0c65d6e9f87b2925d6802d2c0a + docs/models/agentsapiv1conversationsmessagesrequest.md: + id: 2c749c6620d4 + last_write_checksum: sha1:781e526b030653dc189d94ca04cdc4742f9506d2 + pristine_git_object: a91ab0466d57379eacea9d475c72db9cb228a649 + docs/models/agentsapiv1conversationsrestartrequest.md: + id: 6955883f9a44 + last_write_checksum: sha1:99c1455c7fde9b82b6940e6e1ed4f363d7c38de9 + pristine_git_object: a18a41f5395adae3942573792c86ddf7c3812ff4 + docs/models/agentsapiv1conversationsrestartstreamrequest.md: + id: 0c39856fd70e + last_write_checksum: sha1:d03475c088c059077049270c69be01c67a17f178 + pristine_git_object: 7548286af5d1db51fbfd29c893eb8afdc3c97c4d docs/models/agentscompletionrequest.md: id: 906b82c214dc - 
last_write_checksum: sha1:84ee0378e413830260a279a67fc3b1342e643328 - pristine_git_object: d87dc7da67dd883f92a23d8df4f5648e97c4f12e + last_write_checksum: sha1:b5685a779b633823ccfe99d9740078e0aab50bde + pristine_git_object: 33435732b94c81c7bccff5cf1868b2f382223200 docs/models/agentscompletionrequestmessage.md: id: 5337f0644b40 last_write_checksum: sha1:ecf7b7cdf0d24a5e97b520366cf816b8731734bb @@ -130,8 +194,8 @@ trackedFiles: pristine_git_object: 63b9dca9fbb8d829f93d8327a77fbc385a846c76 docs/models/agentscompletionstreamrequest.md: id: 21d09756447b - last_write_checksum: sha1:0c88bc63255733480b65b61685dcc356fcc9ed66 - pristine_git_object: dd1804a1b3a2aadc3e3c3964262b0fc25195703f + last_write_checksum: sha1:9d506ac8f620f4cef54b4b7a1891fb17b8eaefa5 + pristine_git_object: 407be8e0c1264a31cc0d80c1059f3bd62c2eaceb docs/models/agentscompletionstreamrequestmessage.md: id: b309ade92081 last_write_checksum: sha1:98744c9646969250242cbbfbdf428dbd7030e4bb @@ -148,58 +212,34 @@ trackedFiles: id: 513b8b7bc0b7 last_write_checksum: sha1:9154d0ac6b0ab8970a10a8ad7716009d62e80ce7 pristine_git_object: 022f7e10edb22cb1b1d741c13ac586bd136d03b5 - docs/models/agentupdaterequest.md: - id: 75a7f820b906 - last_write_checksum: sha1:358e39130bc439f5801a2dcc73502a1f1c2c6685 - pristine_git_object: b1830d7be6cb8e33529246a3368deaf0909a3343 - docs/models/agentupdaterequesttool.md: - id: 9c9aac9dda3d - last_write_checksum: sha1:25d8a331a706bf8e6056b99f8ff1a46abff6ae72 - pristine_git_object: ce5531260e9b06db0b93d4bfcf95a12b627da522 docs/models/apiendpoint.md: id: be613fd9b947 last_write_checksum: sha1:4d984c11248f7da42c949164e69b53995d5942c4 pristine_git_object: 8d83a26f19241da5ce626ff9526575c50e5d27be - docs/models/appendconversationrequest.md: - id: 295b6d446690 - last_write_checksum: sha1:0c3d7091b19abf30fb0b78800cab292abd902c1d - pristine_git_object: 977d8e8b797c8ae36de4da90bc32bba47a6a0779 - docs/models/appendconversationstreamrequest.md: - id: aeea33736f95 - last_write_checksum: 
sha1:a0b5b036e46688e862c7f7671c86f965b5322742 - pristine_git_object: a23231c2c2f0017ba29c8863c3046aebe8f57ff1 - docs/models/archiveftmodelout.md: - id: 9e855deac0d1 - last_write_checksum: sha1:41866e666241ed42e5e7c6df5a64b887f1ff774b - pristine_git_object: 98fa7b19e4579198b433eccc76b2b4d990476b72 - docs/models/archivemodelrequest.md: - id: 3fde72a45ad9 - last_write_checksum: sha1:60eaa9be631215c63a2c01da7da809ec34f5b01a - pristine_git_object: 806d135e2bc6c0da2b20a4bb84107d3ab31962ad + docs/models/archivemodelresponse.md: + id: 133f4af8058f + last_write_checksum: sha1:95fa73ebd765cbd244c847218df6d31e18dc5e85 + pristine_git_object: 276656d1d00ca174e78aa9102f7f576575daa818 docs/models/arguments.md: id: 7ea5e33709a7 last_write_checksum: sha1:09eea126210d7fd0353e60a76bf1dbed173f13ec pristine_git_object: 2e54e27e0ca97bee87918b2ae38cc6c335669a79 docs/models/assistantmessage.md: id: 7e0218023943 - last_write_checksum: sha1:e75d407349842b2de46ee3ca6250f9f51121cf38 - pristine_git_object: 3d0bd90b4433c1a919f917f4bcf2518927cdcd50 + last_write_checksum: sha1:47d5cd1a1bef9e398c12c207f5b3d8486d94f359 + pristine_git_object: 9ef638379aee1198742743800e778409c47a9b9d docs/models/assistantmessagecontent.md: id: 9f1795bbe642 last_write_checksum: sha1:1ce4066623a8d62d969e5ed3a088d73a9ba26643 pristine_git_object: 047b7cf95f4db203bf2c501680b73ca0562a122d - docs/models/assistantmessagerole.md: - id: bb5d2a4bc72f - last_write_checksum: sha1:82f2c4f469426bd476c1003a91394afb89cb7c91 - pristine_git_object: 658229e77eb6419391cf7941568164541c528387 docs/models/attributes.md: id: ececf40457de last_write_checksum: sha1:9f23adf16a682cc43346d157f7e971c596b416ef pristine_git_object: 147708d9238e40e1cdb222beee15fbe8c1603050 docs/models/audiochunk.md: id: 88315a758fd4 - last_write_checksum: sha1:d52e493765280fc0b1df61a0ce1086205965c712 - pristine_git_object: 8a04af045f4ce33a2964f5f75664e82c3edf1bf3 + last_write_checksum: sha1:b47b295122cea28d66212d75a1f0eccd70a248cc + pristine_git_object: 
1ba8b0f578fa94b4f8dddf559798e033a1704e7b docs/models/audioencoding.md: id: 1e0dfee9c2a0 last_write_checksum: sha1:5d47cfaca916d7a47adbea71748595b3ab69a478 @@ -210,8 +250,8 @@ trackedFiles: pristine_git_object: d174ab9959cadde659f76db94ed87c743e0f6783 docs/models/audiotranscriptionrequest.md: id: ebf59641bc84 - last_write_checksum: sha1:c55c97a06726812323a031897beffbb160021c05 - pristine_git_object: d7f5bd51b1289f0eb481d86a71bb483ee50bbc40 + last_write_checksum: sha1:a478d0656a0f69d4c426e548e2236b99730e2084 + pristine_git_object: 80bd53015ddee1bcecc7aeecc75152a19afc22c1 docs/models/audiotranscriptionrequeststream.md: id: 79b5f721b753 last_write_checksum: sha1:df6825c05b5a02dcf904ebaa40fb97e9186248cc @@ -224,18 +264,10 @@ trackedFiles: id: 8053e29a3f26 last_write_checksum: sha1:23a12dc2e95f92a7a3691bd65a1b05012c669f0f pristine_git_object: 95016cdc4c6225d23edc4436e11e4a7feacf1fe6 - docs/models/batchjobin.md: - id: 10f37fc761f1 - last_write_checksum: sha1:0acea471920959b7c85a015e557216c783de4e88 - pristine_git_object: 7dcf265dfe63cbbd13b7fa0e56fc62717f3ee050 - docs/models/batchjobout.md: - id: 49a98e5b2aba - last_write_checksum: sha1:b504fcf5a65567ec114fdc5b79cabe7554b36cac - pristine_git_object: 5f1011734b249a75cf9381d024f295fe31ff9f68 - docs/models/batchjobsout.md: - id: d8041dee5b90 - last_write_checksum: sha1:5e4127548b50abbb6cee267ac53a8e05f55b97f9 - pristine_git_object: 7a9d6f688e87851ed7ffa516523e12cb3f967c68 + docs/models/batchjob.md: + id: de2a00d0f739 + last_write_checksum: sha1:1160822c4032e1745dfaf37abcac02e78cbc4fb4 + pristine_git_object: 162e2cff3a1132f2b89e57dcf1bf8b4c403b6453 docs/models/batchjobstatus.md: id: 7e6f034d3c91 last_write_checksum: sha1:9e876b4b94255e1399bbb31feb51e08691bcb8fc @@ -248,22 +280,10 @@ trackedFiles: id: 9d14e972f08a last_write_checksum: sha1:1f32eb515e32c58685d0bdc15de09656194c508c pristine_git_object: f96f50444aaa23ca291db2fd0dc69db0d9d149d9 - docs/models/cancelbatchjobrequest.md: - id: db6860fe9ec3 - last_write_checksum: 
sha1:d2f55d5ffec21f6f70cc77c643c73113b0d1ed43 - pristine_git_object: f31f843bb864fc21ed620e4e069b8a97a091d99c - docs/models/cancelfinetuningjobrequest.md: - id: 10d341c56c9c - last_write_checksum: sha1:a484ad9d8eb791d60e5447b845b73871e9f1e6a3 - pristine_git_object: 6525788cd527eca4d89f95d4c829c1b3eda0f06e - docs/models/cancelfinetuningjobresponse.md: - id: 0c9ca281a898 - last_write_checksum: sha1:ac02c2a268a21430e74f8075671de0b97fd844e6 - pristine_git_object: c512342e575e9b6d57da08b20f50c86510d246d8 docs/models/chatclassificationrequest.md: id: 57b86771c870 - last_write_checksum: sha1:2ee5fff26c780ade7ed89617358befa93a6dfd23 - pristine_git_object: 910d62ae20fc67e9a3200397aeab95513bfed90f + last_write_checksum: sha1:bfd2fb8e2c83578ca0cea5209ea3f18c3bcd2ae5 + pristine_git_object: ba9c95eab2c1e4f080e39e8804a5de222e052ee6 docs/models/chatcompletionchoice.md: id: 0d15c59ab501 last_write_checksum: sha1:a6274a39a4239e054816d08517bf8507cb5c4564 @@ -274,8 +294,8 @@ trackedFiles: pristine_git_object: b2f15ecbe88328de95b4961ddb3940fd8a6ee64b docs/models/chatcompletionrequest.md: id: adffe90369d0 - last_write_checksum: sha1:f6eec11c908ee6581e508fff98e785441c4b84ad - pristine_git_object: f3abeeff4346c181cfca40eb819a8c6ecf656026 + last_write_checksum: sha1:4980b698006c641b1c84495c5b601cc8662b05f6 + pristine_git_object: 921161faf38b2f4d4648d6d744c08a96ed38f0a6 docs/models/chatcompletionrequestmessage.md: id: 3f5e170d418c last_write_checksum: sha1:7921c5a508a9f88adc01caab34e26182b8035607 @@ -294,8 +314,8 @@ trackedFiles: pristine_git_object: a0465ffbfc5558628953e03fbc53b80bbdc8649b docs/models/chatcompletionstreamrequest.md: id: cf8f29558a68 - last_write_checksum: sha1:7ed921e0366c1b00225c05e60937fb8d228f027b - pristine_git_object: 42792d396462dead9d7a80a87f05a0888efe348b + last_write_checksum: sha1:c54d4a32d0d65533b79c381174690e9b735b2800 + pristine_git_object: 8761f000d4249de86265bc63da785cd807c2e7a5 docs/models/chatcompletionstreamrequestmessage.md: id: 053a98476cd2 
last_write_checksum: sha1:8270692463fab1243d9de4bbef7162daa64e52c5 @@ -324,10 +344,10 @@ trackedFiles: id: aec173bca43b last_write_checksum: sha1:14ce49ace5845bc467fe1559b12374bfd36bc9a7 pristine_git_object: ff1c6ea32233d5c5e8d6292c62f9e8eacd3340c3 - docs/models/checkpointout.md: - id: 909ce66e1f65 - last_write_checksum: sha1:89e678d55b97353ad1c3b28d9f1ab101f6be0928 - pristine_git_object: 053592d2c57c43220bec3df27cc1486554178955 + docs/models/checkpoint.md: + id: 9c97119961cf + last_write_checksum: sha1:0e7732d9c30f67d59fe4d9ad1d165ad0cd80c790 + pristine_git_object: f7f35530c0d57aca02b2503e968a9a262bb1a10d docs/models/classificationrequest.md: id: 6f79e905a3fa last_write_checksum: sha1:3e083210e1cfdd3539e714928688648673767ae8 @@ -344,58 +364,54 @@ trackedFiles: id: 97a5eab5eb54 last_write_checksum: sha1:41269d1372be3523f46cb57bd19292af4971f7c0 pristine_git_object: f3b10727b023dd83a207d955b3d0f3cd4b7479a1 - docs/models/classifierdetailedjobout.md: - id: a2084ba5cc8c - last_write_checksum: sha1:ee206a5c68bd7aed201f8274d0710e8c570a35d2 - pristine_git_object: fb532449458fb445bb79d3fa0ed8e6faa538f00a - docs/models/classifierdetailedjoboutintegration.md: - id: 7a775cbd4d9f - last_write_checksum: sha1:6b2691766c1795d17b1572076a693eb377c5307f - pristine_git_object: 9dfa6e8a179529bd12fb8935c264e3c57c62cb41 - docs/models/classifierdetailedjoboutstatus.md: - id: a98493f9d02d - last_write_checksum: sha1:3441d9961e9093d314dd1bc88df1743cd12866d2 - pristine_git_object: c3118aafa8614f20c9adf331033e7822b6391752 - docs/models/classifierftmodelout.md: - id: 268ac482c38b - last_write_checksum: sha1:46bdbe1176bbf43dd79a4ff8255129fd82bd97bc - pristine_git_object: 6e7afbbed075efe2e29f42b7bc3d758fe47460d4 - docs/models/classifierjobout.md: - id: 2e3498af3f8c - last_write_checksum: sha1:70845cc24cd48987552ca337ea5522066e6de1b9 - pristine_git_object: ceecef5decdbd74a9741401ad0f1a9e8e215ae82 - docs/models/classifierjoboutintegration.md: - id: 30a340fed57d - last_write_checksum: 
sha1:72dfda442a88f977f3480c95127534a600362806 - pristine_git_object: 33af8a708618c1e54c7f55e67c8848fe45217799 - docs/models/classifierjoboutstatus.md: - id: 2411c6bf3297 - last_write_checksum: sha1:6ceef218b783505231a0ec653292460e6cb1a65b - pristine_git_object: 4520f1648323227863f78f7f86b2b4567bb7ace7 - docs/models/classifiertargetin.md: - id: 90d2da204677 - last_write_checksum: sha1:18fca3deee476b3dd23d55a9a40ced96cdc21f83 - pristine_git_object: 78cab67b4ced9fd0139a1dc4e6b687de870f9c62 - docs/models/classifiertargetout.md: - id: 1ce5c0513022 - last_write_checksum: sha1:2b8ed8a25b6ea6f2717cb4edcfa3f6a1ff3e69e4 - pristine_git_object: 57535ae5cb7d30177d1800d3597fe2f6ec3ad024 + docs/models/classifierfinetunedmodel.md: + id: b67a370e0ef1 + last_write_checksum: sha1:5fe3c26e337083716dd823e861924a03c55ce293 + pristine_git_object: ad05f93147d6904ee62602480c24644ec5e4cf63 + docs/models/classifierfinetuningjob.md: + id: 5bf35c25183f + last_write_checksum: sha1:afedddfe38e217189b5ec12ded74606c3b1e4c59 + pristine_git_object: 369756ba16a4c64f03cb6bb5da9bc0abd2a8eac6 + docs/models/classifierfinetuningjobdetails.md: + id: c91d53e010d5 + last_write_checksum: sha1:59a4c11a0d52b02ffc48e011a40fb4ebb1604825 + pristine_git_object: c5efdf1c817b978506a4862991a0f8eab8b219fb + docs/models/classifierfinetuningjobdetailsintegration.md: + id: e6c161ac2a44 + last_write_checksum: sha1:6450686e7f92ac8c1c02fcea82d5855ca6738b46 + pristine_git_object: 438a35d9eb0e4250a9e6bcbb7dafeb26d74e018a + docs/models/classifierfinetuningjobdetailsstatus.md: + id: 87737e85b845 + last_write_checksum: sha1:2ff02df3efee0f9b5867045d43fc71025fb37129 + pristine_git_object: 058c65832188f7148d96ab320114d984d618efa1 + docs/models/classifierfinetuningjobintegration.md: + id: 91de20176a8c + last_write_checksum: sha1:e49a7c082300eb4d3106e96b21ebc6860060b8c3 + pristine_git_object: 820aee4c6fcf899341d869d796b1a61d4d4eab42 + docs/models/classifierfinetuningjobstatus.md: + id: e3c4e672dc88 + last_write_checksum: 
sha1:1bfd306ab633d3ea73272e56796c1f63843fce22 + pristine_git_object: ca829885de056c5ccafec0fe3a901743e56deb0c + docs/models/classifiertarget.md: + id: 4c5c0b3e0bc7 + last_write_checksum: sha1:ad16823def0acb267543c4189df32406a27685aa + pristine_git_object: f8c99e2e7e6653d0e809506861ec4c25571cb5c9 + docs/models/classifiertargetresult.md: + id: c78d27aec276 + last_write_checksum: sha1:17c37c10385019953d6085fff6681808f950693f + pristine_git_object: ccadc623493bfa946dc2cccf894364b1e6b8b452 docs/models/classifiertrainingparameters.md: id: 9370e1ccd3d5 last_write_checksum: sha1:03f7c32717792966afdec50cb9dc1c85bb99dd84 pristine_git_object: 3b6f3be6942bbcf56261f773864a518d16923880 - docs/models/classifiertrainingparametersin.md: - id: 8bcca130af93 - last_write_checksum: sha1:7e9d61d3377031c740ea98d6c3dc65be99dc059b - pristine_git_object: 1287c973fae9762310597fbeceaef26865ace04f docs/models/codeinterpretertool.md: id: f009740c6e54 - last_write_checksum: sha1:bce278ce22703246613254ee2dac57f8b14e8060 - pristine_git_object: 544cda9358faf6ec525d06f78068817aee55b193 + last_write_checksum: sha1:a2114d61a98a48b4365a77c0c75c06ca834367ad + pristine_git_object: 6302fc627d7c49442b6c9aec19c70fdceaf7c519 docs/models/completionargs.md: id: 3b54534f9830 - last_write_checksum: sha1:c0368b7c21524228939b2093ff1a4524eb57aeb7 - pristine_git_object: 60d091374a80418892df9700dc0c21e7dad28775 + last_write_checksum: sha1:7432daccf23d8963a65fa4f2b103ea0396fbfbeb + pristine_git_object: 148f760859636e8c32259604698785663491a445 docs/models/completionargsstop.md: id: 40b0f0c81dc8 last_write_checksum: sha1:2a576618c62d4818af0048ed3a79080149a88642 @@ -404,46 +420,46 @@ trackedFiles: id: 60cb30423c60 last_write_checksum: sha1:61b976fe2e71236cf7941ee1635decc31bd304b2 pristine_git_object: 7f8ab5e631e2c6d1d9830325e591a7e434b83a35 - docs/models/completiondetailedjobout.md: - id: 634ca7241abd - last_write_checksum: sha1:7899568eedfa04cccb5b07c2e0d1e821af8fb0a2 - pristine_git_object: 
bc7e5d1cb5c298d0d935a9e3472ad547b5b9714c - docs/models/completiondetailedjoboutintegration.md: - id: f8d1f509f456 - last_write_checksum: sha1:3317db3f71962623a6144e3de0db20b4abfd5b9b - pristine_git_object: 9e526053160cc393dc65242cff8f8143bc67e38c - docs/models/completiondetailedjoboutrepository.md: - id: a8e7452065a7 - last_write_checksum: sha1:b1910efc6cd1e50391cd33daef004441bac3d3cd - pristine_git_object: 92a7b75c51f27e73ca41d5ffee28921057959878 - docs/models/completiondetailedjoboutstatus.md: - id: c606d38452e2 - last_write_checksum: sha1:1e9a5736de32a44cf539f7eaf8214aad72ec4994 - pristine_git_object: b80525bad8f6292892d8aee864a549c8ec52171c docs/models/completionevent.md: id: e57cd17cb9dc last_write_checksum: sha1:4f59c67af0b11c77b80d2b9c7aca36484d2be219 pristine_git_object: 7a66e8fee2bb0f1c58166177653893bb05b98f1d - docs/models/completionftmodelout.md: - id: 93fed66a5794 - last_write_checksum: sha1:ee4bccae36229f23b1db8894585cc8e88ad71f6d - pristine_git_object: ccd4844fab92d000de1cc9ba59c884e31dc5db26 - docs/models/completionjobout.md: - id: 77315b024171 - last_write_checksum: sha1:a08ca1dcedbb9b88b9909a4b03251e2fb0cd8319 - pristine_git_object: 5eb44eef73872b0f1c2709381fc0852e3b3e224b - docs/models/completionjoboutintegration.md: - id: 25e651dd8d58 - last_write_checksum: sha1:59711a3fa46d6a4bff787a61c81ecc34bdaaec2e - pristine_git_object: 6474747bf8d38485f13b1702e3245ef9e0f866a9 - docs/models/completionjoboutrepository.md: - id: 2c94b3ecacf1 - last_write_checksum: sha1:2cb5b23640eeaf87f45dc9f180247ed7a6307df7 - pristine_git_object: 52f65558f8b3663596642d8854df36d29858beae - docs/models/completionjoboutstatus.md: - id: b77ebfd0e4f0 - last_write_checksum: sha1:b8f33134c63b12dc474e7714b1ac19d768a3cbbd - pristine_git_object: 917549450a096397d9a7ca0b8f5856f7cd62db04 + docs/models/completionfinetunedmodel.md: + id: 23a7705a9c89 + last_write_checksum: sha1:50d173b7505a97435c9d7ccb4fa99af04a51c6a2 + pristine_git_object: 0055db021f1c039c84cf7cfecd654683d2f9996f + 
docs/models/completionfinetuningjob.md: + id: 13c69dd18690 + last_write_checksum: sha1:b77e82f00f851034999986ff67aea5b0b558fbd2 + pristine_git_object: 83c0ae7e551e1f70df8dad4dce75ad20fe2b7ae7 + docs/models/completionfinetuningjobdetails.md: + id: b285f80afd59 + last_write_checksum: sha1:6ced5483d8249d7e8f502ec3f53f45d76e348003 + pristine_git_object: 3c54e874bcd968a9d5d9c8b3285632ba71364763 + docs/models/completionfinetuningjobdetailsintegration.md: + id: 27662795c95f + last_write_checksum: sha1:655f03341ad1b590ec451288607cec61024bfefc + pristine_git_object: 38f6a34963db4a653ec7aa7f0c85b68e837ebafc + docs/models/completionfinetuningjobdetailsrepository.md: + id: 023920eecc9e + last_write_checksum: sha1:2b8ba6ff115fda4cc6ed74825fb09b9500d915f6 + pristine_git_object: c6bd67cde1d1628aa3efc4a53fa8487a009aa129 + docs/models/completionfinetuningjobdetailsstatus.md: + id: b1b717a4e256 + last_write_checksum: sha1:97c8699f0979978ea4320da3388e18da6219cb87 + pristine_git_object: 94d795a9ba4ec743f274d4ab5666e8897d174c61 + docs/models/completionfinetuningjobintegration.md: + id: 392ffc2cdef2 + last_write_checksum: sha1:53540da44e0edbad5d4085f81ded159dbc582a6c + pristine_git_object: dbe57417d78f1de798c6eaea7e56984e3b002cb9 + docs/models/completionfinetuningjobrepository.md: + id: deb47b72e8e4 + last_write_checksum: sha1:c0fd43a01c2f763c7945311741ee3c2b9c7520f6 + pristine_git_object: 54225e27204b703a6b33d2d66492e272559c3b3c + docs/models/completionfinetuningjobstatus.md: + id: 2ac420312815 + last_write_checksum: sha1:90f498cb04e89e8f4a424762c07231fd9030b326 + pristine_git_object: db151a1bd871a2bf231424a78c8c450b2a488099 docs/models/completionresponsestreamchoice.md: id: d56824d615a6 last_write_checksum: sha1:0296a490df009dbfd04893fdebcc88dd6102a872 @@ -456,26 +472,26 @@ trackedFiles: id: b716b0195d39 last_write_checksum: sha1:1d8d7c469f933ea741ec15c8b9ef8b986e0ca95e pristine_git_object: 4746a95df18c78331f572425a16b2b3dcbc2df4c - docs/models/completiontrainingparametersin.md: - id: 
7223a57004ab - last_write_checksum: sha1:8f77e5fe2ce149115b0bda372c57fafa931abd90 - pristine_git_object: 9fcc714e5f000e6134f7f03f1dd4f56956323385 + docs/models/confirmation.md: + id: 19b9e48a3c2e + last_write_checksum: sha1:eb6494cb19f23c6df62afb009cc88ce38d24af86 + pristine_git_object: fd6e6aaa58cabba0cdec1b76ac50fb6e46f91b07 docs/models/contentchunk.md: id: d2d3a32080cd last_write_checksum: sha1:b253e4b802adb5b66d896bfc6245ac4d21a0c67c pristine_git_object: cb7e51d3a6e05f197fceff4a4999594f3e340dac docs/models/conversationappendrequest.md: id: 722746e5065c - last_write_checksum: sha1:1677ab5b06748a7650464c0d7596e66e6759ede2 - pristine_git_object: 1cdb584b62423072f9a7cdc61f045b0d161525df + last_write_checksum: sha1:c8a4a49f0a1fe5cdd2ef6264ef9c600cfc8f7beb + pristine_git_object: 78a96508e4e1c6f83de4556d0bfa3b10c875da37 docs/models/conversationappendrequesthandoffexecution.md: id: e3f56d558037 last_write_checksum: sha1:dc71c8db746bb08f6630e995cf6af9fda747e954 pristine_git_object: 7418b36a55fab959639aec456a946600eb908efb docs/models/conversationappendstreamrequest.md: id: e9f8131435e8 - last_write_checksum: sha1:559d90bbf6d64f46221edaa6482837f0ee3b0626 - pristine_git_object: a8516ea7fc7db1d6bc0abb8f99b967a1715ceb4b + last_write_checksum: sha1:3afe7eaafbf61abcd9341ee8fbca5c6d0c2db0ab + pristine_git_object: daea9c522a8a0693edce11b1bbeca1f2cba0781e docs/models/conversationappendstreamrequesthandoffexecution.md: id: 5739ea777905 last_write_checksum: sha1:c85584b63c0c5d859ee5d46d6ae167a8ee44e279 @@ -490,24 +506,16 @@ trackedFiles: pristine_git_object: 5452d7d5ce2aa59a6d89c7b7363290e91ed8a0a3 docs/models/conversationhistory.md: id: 7e97e8e6d6e9 - last_write_checksum: sha1:06df76a87aca7c5acd5a28ca3306be09a8bb541b - pristine_git_object: c8baad0b597ddb9148413a651a659b06c20351ac - docs/models/conversationhistoryobject.md: - id: 088f7df6b658 - last_write_checksum: sha1:bcce4ef55e6e556f3c10f65e860faaedc8eb0671 - pristine_git_object: a14e7f9c7a392f0d98e79cff9cc3ea54f30146fa + 
last_write_checksum: sha1:719a7c0722f3ad2e9f428dd31abf7bd0bad197d2 + pristine_git_object: daefe3363fb57d9a7d2737d3ea3d6e6f61021d49 docs/models/conversationinputs.md: id: 23e3160b457d last_write_checksum: sha1:0c6abaa34575ee0eb22f12606de3eab7f4b7fbaf pristine_git_object: 86db40ea1390e84c10a31155b3cde9066eac23b0 docs/models/conversationmessages.md: id: 46684ffdf874 - last_write_checksum: sha1:01ccdc4b509d5f46ff185f686d332587e25fc5b7 - pristine_git_object: c3f00979b748ad83246a3824bb9be462895eafd6 - docs/models/conversationmessagesobject.md: - id: b1833c3c20e4 - last_write_checksum: sha1:bb91a6e2c89066299660375e5e18381d0df5a7ff - pristine_git_object: db3a441bde0d086bccda4814ddfbf737539681a6 + last_write_checksum: sha1:5b10a9f3f19591a2675979c21dd8383d5249d728 + pristine_git_object: 8fa51571697ee375bfbc708de854bc0b1129eec7 docs/models/conversationrequest.md: id: dd7f4d6807f2 last_write_checksum: sha1:e4da423f9eb7a8a5d0c21948b50e8df08a63552c @@ -526,16 +534,12 @@ trackedFiles: pristine_git_object: 2e4e8d01b5482c4e0644be52e55bf6912aeff69e docs/models/conversationresponse.md: id: 2eccf42d48af - last_write_checksum: sha1:17ebabdf1dd191eeac442046511c44120dfa97a1 - pristine_git_object: e31821288dd18bf425e442787f67a69ea35ff6a6 - docs/models/conversationresponseobject.md: - id: 6c028b455297 - last_write_checksum: sha1:76270a07b86b1a973b28106f2a11673d082a385b - pristine_git_object: bea66e5277feca4358dd6447959ca945eff2171a + last_write_checksum: sha1:8a86a4d0df6d13b121d5e41a8ee45555b69bf927 + pristine_git_object: 2732f785cdd706274ec5ff383f25fc201e6d0f78 docs/models/conversationrestartrequest.md: id: 558e9daa00bd - last_write_checksum: sha1:0e33f56f69313b9111b3394ecca693871d48acfa - pristine_git_object: d98653127fd522e35323b310d2342ccc08927962 + last_write_checksum: sha1:434e6c94b5d6c37b9026d536308cd1d3ff56e8d6 + pristine_git_object: ad3ff3624f533e4d4f751264d9bc6dd1849b3b69 docs/models/conversationrestartrequestagentversion.md: id: e6ea289c6b23 last_write_checksum: 
sha1:a5abf95a81b7e080bd3cadf65c2db38ca458573f @@ -546,8 +550,8 @@ trackedFiles: pristine_git_object: 5790624b82ce47ea99e5c25c825fbc25145bfb8e docs/models/conversationrestartstreamrequest.md: id: 01b92ab1b56d - last_write_checksum: sha1:aa3d30800417e04f741324d60529f3190ea9cd16 - pristine_git_object: a5f8cbe73ed1ce28c82d76f0e9f933bda64f733c + last_write_checksum: sha1:e9755598b5be197a938f1f74aa77ac24ccac8457 + pristine_git_object: 865a1e8f666d7f6878c40eb70fe5ab1c63da3066 docs/models/conversationrestartstreamrequestagentversion.md: id: 395265f34ff6 last_write_checksum: sha1:ebf4e89a478ab40e1f8cd3f9a000e179426bda47 @@ -572,58 +576,62 @@ trackedFiles: id: 71df6212ff44 last_write_checksum: sha1:f2882742a74dd2b4f74383efa444c7ab968249dc pristine_git_object: 0f75f82b38f224340bed468ceecfe622066740ba + docs/models/conversationthinkchunk.md: + id: b9a8324da8f1 + last_write_checksum: sha1:80aed188198434ceca134e7aa7351ddba82c92c9 + pristine_git_object: 1fb16bd99f2b6277f87cd40d5c1eca389819d725 + docs/models/conversationthinkchunkthinking.md: + id: 477db2d543bd + last_write_checksum: sha1:d9f8c37fe933a3e52e2adb3ffe283d79c187cd36 + pristine_git_object: 84b800188b248166aac0043994fa27d4d79aad9d docs/models/conversationusageinfo.md: id: 57ef89d3ab83 last_write_checksum: sha1:d92408ad37d7261b0f83588e6216871074a50225 pristine_git_object: 57e260335959c605a0b9b4eaa8bf1f8272f73ae0 - docs/models/createfinetuningjobresponse.md: - id: a9d31306296c - last_write_checksum: sha1:a15ccee66983fcc23321f966440d02fab4463178 - pristine_git_object: f82cd793b466b0028b586781d36c690c0e5f97cd - docs/models/createorupdateagentaliasrequest.md: - id: be33079aa124 - last_write_checksum: sha1:84cb72c549ee74c44dcf00b3f6a100060e322295 - pristine_git_object: af2591ebb584965f5110ed987993f3a72b513255 - docs/models/deleteagentaliasrequest.md: - id: c116b5c42b1b - last_write_checksum: sha1:51e1544cc867389120a2d1fbb4780c855690841e - pristine_git_object: 17812ec4a03b452a2d31950cc5a9e87a8f6d79f7 - 
docs/models/deleteagentrequest.md: - id: 6411b6df1c85 - last_write_checksum: sha1:1157d4717b75be91744bd7464c042e367faa4b71 - pristine_git_object: 0aaacae471dd81ddc5ce4808abdd2b5653503ff6 - docs/models/deleteconversationrequest.md: - id: 7247871c454c - last_write_checksum: sha1:a43ed3e32630fbb41921fa413ab2a26a914e425e - pristine_git_object: 39d9e5dfd52d9df1d1da7093761b65e0d12a0b40 - docs/models/deletedocumentrequest.md: - id: 898eebfc019e - last_write_checksum: sha1:f06a13be4484048cf15c21d46eb2d107057b39db - pristine_git_object: eb060099f1b078fd084551338b51ee6677e8d235 - docs/models/deletefileout.md: - id: c7b84242a45c - last_write_checksum: sha1:f2b039ab88fc83ec5dd765cab8e2ed8cce7e417d - pristine_git_object: 4709cc4958d008dc24430deb597f801b91c6957f - docs/models/deletefilerequest.md: - id: ca151d3da83a - last_write_checksum: sha1:ec50f13b099a6ef28d7965f7c8721ce1f505f7d2 - pristine_git_object: bceae901954471a8667a3a61e66da6361ef50190 - docs/models/deletelibraryaccessrequest.md: - id: ca39ae894c1f - last_write_checksum: sha1:41b7cd5c2e4616d3edefeb271dd7089fa04bd67d - pristine_git_object: c7034b98c30234a0a8cb368d84d9b287690027de - docs/models/deletelibraryrequest.md: - id: 4be1af37ab41 - last_write_checksum: sha1:2769939a702c26be619f6c455cd48365b64110cc - pristine_git_object: c229ad73b2a7c39dab0ccdfa29e1f0475f0cdc7b + docs/models/createagentrequest.md: + id: 9484bab389c1 + last_write_checksum: sha1:b3228a622081b6f4b2a8bdaa60ca16049517d819 + pristine_git_object: cca3a079c532d3426f65a15bb0affdd34fd1d3ac + docs/models/createagentrequesttool.md: + id: 72e5f99878c5 + last_write_checksum: sha1:a90ad01c15da321f0c8ec700ba359a5371c5dcbb + pristine_git_object: c6ed3e98566eb684932fae9d2648a85c84443493 + docs/models/createbatchjobrequest.md: + id: e79afe8f495c + last_write_checksum: sha1:6cedce49f3108b9d5bc80e6d11712c594f2d9e50 + pristine_git_object: d094e2d518b31ada68c282241af3aa1483e98ff6 + docs/models/createfileresponse.md: + id: ea1396cebae8 + last_write_checksum: 
sha1:7b26d0a466004aca5cefaeb29f84dafc405c51ff + pristine_git_object: 8152922b0d4ce199e269df955e5a25d4acf71e28 + docs/models/createfinetuningjobrequest.md: + id: 36824ba035ff + last_write_checksum: sha1:78f019530e9f5deace91c454c91ec6c4d0d23a20 + pristine_git_object: a93e323d5dd474c6d287e1203e85b79d11d762f0 + docs/models/createfinetuningjobrequestintegration.md: + id: e41b5575b494 + last_write_checksum: sha1:06dab95269f4a571a4c62a7f956fbf0250a0e8b3 + pristine_git_object: 0054a4a683a88fe67f92c1659bcb8c792ca8d286 + docs/models/createfinetuningjobrequestrepository.md: + id: e113eb1929b5 + last_write_checksum: sha1:6bd504d3ecb219f3245a83d306c1792133b96769 + pristine_git_object: 32be1b6dc3fcf7f6ee1a1d71abee4c81493655c2 + docs/models/createlibraryrequest.md: + id: 8935b2ed9d13 + last_write_checksum: sha1:c00abfe1abb0f0323e434b084dafa0d451eb3e51 + pristine_git_object: 71562806dbec6444dcdd0a19852a31ca00b6229a + docs/models/deletefileresponse.md: + id: ab3aa44589a0 + last_write_checksum: sha1:47ebc2474e4725e9ecb0f0d5940c604d9a82a4df + pristine_git_object: 188e2504606b051674352339c6aa999116a43b61 docs/models/deletemodelout.md: id: 5643e76768d5 last_write_checksum: sha1:1593c64f7673e59b7ef1f4ae9f5f6b556dd6a269 pristine_git_object: 5fd4df7a7013dcd4f6489ad29cdc664714d32efd - docs/models/deletemodelrequest.md: - id: 22c414d48ee4 - last_write_checksum: sha1:a60f549577b3461cb7552ad2080a34ad389f8579 - pristine_git_object: d80103f1610668292589b6d7b861de814c17afda + docs/models/deletemodelv1modelsmodeliddeleterequest.md: + id: c838cee0f093 + last_write_checksum: sha1:e5b6d18b4f8ab91630ae34a4f50f01e536e08d99 + pristine_git_object: d9bc15fe393388f7d0c41abce97ead17e35e2ba4 docs/models/deltamessage.md: id: 6c5ed6b60968 last_write_checksum: sha1:00052476b9b2474dbc149f18dd18c71c86d0fc74 @@ -634,40 +642,28 @@ trackedFiles: pristine_git_object: 8142772d7ea33ad8a75cf9cf822564ba3f630de2 docs/models/document.md: id: cd1d2a444370 - last_write_checksum: sha1:d00a2ac808a0ae83a7b97da87e647ecc8dca9c52 
- pristine_git_object: 509d43b733d68d462853d9eb52fc913c855dff40 + last_write_checksum: sha1:77076e66dea6f4582e73ecc5a55ef750f026448a + pristine_git_object: 284babb98fbb0279bef2626fa18eada0035572c5 docs/models/documentlibrarytool.md: id: 68083b0ef8f3 - last_write_checksum: sha1:470b969fa4983c0e7ad3d513b4b7a4fa8d5f0f41 - pristine_git_object: 1695bad40cb0a1eb269e4ee12c6a81cbf0c7749a - docs/models/documentout.md: - id: a69fd1f47711 - last_write_checksum: sha1:ed446078e7194a0e44e21ab1af958d6a83597edb - pristine_git_object: 28df11eb1aef1fdaf3c1103b5d61549fb32ea85d + last_write_checksum: sha1:76b9f47c399915a338abe929cb10c1b37282eadf + pristine_git_object: 95c3fa52ee3ff29e72bc0240a98c0afaa0cd5f62 docs/models/documenttextcontent.md: id: 29587399f346 last_write_checksum: sha1:93382da0228027a02501abbcf681f247814d3d68 pristine_git_object: 989f49e9bcb29f4127cb11df683c76993f14eba8 - docs/models/documentupdatein.md: - id: 185ab27259a7 - last_write_checksum: sha1:e0faccd04229204968dbc4e8131ee72f81288182 - pristine_git_object: 0993886d56868aba6844824f0e0fdf1bdb9d74f6 + docs/models/documentunion.md: + id: c65f9e42375c + last_write_checksum: sha1:249043e03067f79b27dc6eac410fb937920e8cdb + pristine_git_object: e573bd4632493ca648ad61307c70148366625d4b docs/models/documentupload.md: id: 7ff809a25eb0 last_write_checksum: sha1:aea0f81009be09b153019abbc01b2918a1ecc1f9 pristine_git_object: 4e58a475f1776431c9c27a0fcdd00dd96257801f docs/models/documenturlchunk.md: id: 48437d297408 - last_write_checksum: sha1:38c3e2ad5353a4632bd827f00419c5d8eb2def54 - pristine_git_object: 6c9a5b4d9e6769be242b27ef0208f6af704689c0 - docs/models/documenturlchunktype.md: - id: a3574c91f539 - last_write_checksum: sha1:a0134fc0ea822d55b1204ee71140f2aa9d8dbe9c - pristine_git_object: 32e1fa9e975a3633fb49057b38b0ea0206b2d8ef - docs/models/downloadfilerequest.md: - id: 5acd7aafd454 - last_write_checksum: sha1:5d7056818ddc5860e43699917496ded68b91ddfa - pristine_git_object: 3f4dc6ccc6d1c67396fe97197067c5421d8dc2d5 + 
last_write_checksum: sha1:5f9294355929d66834c52c67990ba36a7f81387d + pristine_git_object: 9dbfbe5074de81b9fcf6f5bae8a0423fb2c82f71 docs/models/embeddingdtype.md: id: 22786e732e28 last_write_checksum: sha1:dbd16968cdecf706c890769d8d1557298f41ef71 @@ -700,10 +696,10 @@ trackedFiles: id: da9a99ab48ab last_write_checksum: sha1:4971db390327db09f88feff5d2b8a0b1e6c5b933 pristine_git_object: d934b6774b25713afe923154d7709755426ec2cf - docs/models/eventout.md: - id: 9960732c3718 - last_write_checksum: sha1:dbc23814b2e54ded4aa014d63510b3a2a3259329 - pristine_git_object: d9202353be984d51b9c05fb0f490053ce6ccfe4a + docs/models/event.md: + id: 311c22a8574a + last_write_checksum: sha1:627793d6aed5e378e3f2eeb4087808eb50e948d5 + pristine_git_object: 3eebffca874b8614a5be3d75be3cb7b0e52c2339 docs/models/file.md: id: 4ad31355bd1c last_write_checksum: sha1:ade4d3c908c664a07a3c333cc24bc1bfb43ab88b @@ -716,14 +712,30 @@ trackedFiles: id: ed6216584490 last_write_checksum: sha1:02767595f85228f7bfcf359f8384b8263580d53a pristine_git_object: 14cab13ee191ae60e2c5e1e336d0a5abc13f778b + docs/models/filesapiroutesdeletefilerequest.md: + id: 7fdf9a97320b + last_write_checksum: sha1:411e38d0e08a499049796d1557f79d669fc65107 + pristine_git_object: 1b02c2dbb7b3ced86ddb49c2323d1d88732b480c + docs/models/filesapiroutesdownloadfilerequest.md: + id: b9c13bb26345 + last_write_checksum: sha1:1f41dad5ba9bd63881de04d24ef49a0650d30421 + pristine_git_object: 8b28cb0e5c60ac9676656624eb3c2c6fdc8a3e88 + docs/models/filesapiroutesgetsignedurlrequest.md: + id: 08f3772db370 + last_write_checksum: sha1:26aa0140444ccef7307ef6f236932032e4784e8f + pristine_git_object: dbe3c801003c7bb8616f0c5be2dac2ab1e7e9fb1 + docs/models/filesapirouteslistfilesrequest.md: + id: 04bdf7c654bd + last_write_checksum: sha1:0a99755150c2ded8e5d59a96527021d29326b980 + pristine_git_object: 57d11722f1dba2640df97c22be2a91317c240608 + docs/models/filesapiroutesretrievefilerequest.md: + id: 2783bfd9c4b9 + last_write_checksum: 
sha1:a1249ef0aedb3056e613078488832c96b91f8cab + pristine_git_object: 961bae1f51a4ae9df21b28fd7a5ca91dc7b3888b docs/models/fileschema.md: id: 9a05a660399d last_write_checksum: sha1:97987d64285ff3092635754c78ad7b68d863e197 pristine_git_object: 4f3e72dba17a964155007755ad9d69f0304b2adb - docs/models/filesignedurl.md: - id: c0a57176d62e - last_write_checksum: sha1:2c64ef5abc75e617496f0a28d3e1cebfe269a6b9 - pristine_git_object: 52ce3f4f0c44df0ef3ed1918f92ad63f76ffc144 docs/models/fimcompletionrequest.md: id: b44677ecc293 last_write_checksum: sha1:24bcb54d39b3fabd487549a27b4c0a65dd5ffe50 @@ -748,6 +760,10 @@ trackedFiles: id: e16926b57814 last_write_checksum: sha1:52006811b756ff5af865ed6f74838d0903f0ee52 pristine_git_object: 34b24bd4db1ad3f9e77e2c6a45a41d2fbc5cf7fd + docs/models/finetunedmodelcapabilities.md: + id: 3a6a0498ccf7 + last_write_checksum: sha1:82fc7d3f4e0b591b757f202699bb645bc61c69ff + pristine_git_object: d3203a2adccb7eb89c58395952c3e5a123a5b31b docs/models/format_.md: id: a17c22228eda last_write_checksum: sha1:dad6de59fec6378d50356007602e2a0254d8d2e4 @@ -756,10 +772,6 @@ trackedFiles: id: b546cfde5aa6 last_write_checksum: sha1:752d9d238a90a3ef55205576fa38cee56ea1539e pristine_git_object: 919cdd384315c99d4b590bc562298403733344ce - docs/models/ftmodelcapabilitiesout.md: - id: f7be0dd1d889 - last_write_checksum: sha1:670412a0c0268f646dd444537bd79ce9440170c8 - pristine_git_object: 19690476c64ac7be53f974347c1618730f0013ce docs/models/ftmodelcard.md: id: 15ed6f94deea last_write_checksum: sha1:1c560ceaaacc1d109b2997c36de03192dfcda941 @@ -774,140 +786,72 @@ trackedFiles: pristine_git_object: 7ccd90dca4868db9b6e178712f95d375210013c8 docs/models/functioncallentry.md: id: 016986b7d6b0 - last_write_checksum: sha1:bd3e67aea9eb4f70064e67e00385966d44f73f24 - pristine_git_object: fd3aa5c575019d08db258842262e8814e57dc6d5 + last_write_checksum: sha1:373eb3a2d72596fcbb8933b28426896d5ac6b6f4 + pristine_git_object: 2843db9d36d8b82a15ebfce0833c8b0832609b4a 
docs/models/functioncallentryarguments.md: id: c4c609e52680 last_write_checksum: sha1:ae88aa697e33d60f351a30052aa3d6e2a8a3e188 pristine_git_object: f1f6e39e724673556a57059a4dbda24f31a4d4b9 - docs/models/functioncallentryobject.md: - id: ea634770754e - last_write_checksum: sha1:d6bc885e9689397d4801b76c1a3c8751a75cf212 - pristine_git_object: 3cf2e427bfb6f2bc7acea1e0c6aafe965187f63f - docs/models/functioncallentrytype.md: - id: b99da15c307b - last_write_checksum: sha1:04665a6718ad5990b3beda7316d55120fbe471b0 - pristine_git_object: 7ea34c5206bdf205d74d8d49c87ddee5607582e9 + docs/models/functioncallentryconfirmationstatus.md: + id: 18f36160d744 + last_write_checksum: sha1:cc3ea4e03d26a1b22f94d42a87bd5ae63535d266 + pristine_git_object: 8948beb6d9ac647ada655960284dfc7f6d1f5ca1 docs/models/functioncallevent.md: id: cc9f2e603464 - last_write_checksum: sha1:942d1bed0778ba4738993fcdbefe080934b641d5 - pristine_git_object: f406206086afa37cbc59aa551ac17a4814dddf7e + last_write_checksum: sha1:58c6ee00af0c63614fd7506345977f9f2d8838ec + pristine_git_object: 0e3a36d6045a69e96c40836cdb586424225775af + docs/models/functioncalleventconfirmationstatus.md: + id: a33cc7957407 + last_write_checksum: sha1:36ac2d3442d83cbb1256e86f413134296bf8e90f + pristine_git_object: 4a3c8774d4eec4e1f5fea23a1827082e09f91669 docs/models/functionname.md: id: 4b3bd62c0f26 last_write_checksum: sha1:754fe32bdffe53c1057b302702f5516f4e551cfb pristine_git_object: 87d7b4852de629015166605b273deb9341202dc0 docs/models/functionresultentry.md: id: 24d4cb18998c - last_write_checksum: sha1:528cae03e09e43bdf13e1a3fef64fd9ed334319b - pristine_git_object: 6df54d3d15e6d4a03e9af47335829f01a2226108 - docs/models/functionresultentryobject.md: - id: 025dc546525c - last_write_checksum: sha1:01a0085fb99253582383dd3b12a14d19c803c33c - pristine_git_object: fe52e0a5a848ea09dfb4913dd8d2e9f988f29de7 - docs/models/functionresultentrytype.md: - id: 69651967bdee - last_write_checksum: sha1:41489b0f727a00d86b313b8aefec85b4c30c7602 - 
pristine_git_object: 35c94d8e553e1cb641bef28fec2d8b3576d142f6 + last_write_checksum: sha1:1758992e30517b505b8d0622a54545dc9ae19163 + pristine_git_object: 6a77abfd27e3e46de950646d7f89777dca11300e docs/models/functiontool.md: id: 5fb499088cdf last_write_checksum: sha1:a9a3b6530b1c48a8575402b48cde7b65efb33a7d pristine_git_object: 0226b7045c9d82186e1111bb2025e96a4de90bd6 - docs/models/getagentagentversion.md: - id: 825de6d2614f - last_write_checksum: sha1:d99f384ff5ee73e68fa7f8581d7622068b5b7498 - pristine_git_object: 6d7b3f1d15994c24a5b992d1908fe8126da0e3ea - docs/models/getagentrequest.md: - id: 743f3a4630be - last_write_checksum: sha1:4d17d6b7b15e39520414085fc977be881e4e0a85 - pristine_git_object: 3f729dff0f7fc773f83593222da0dd0618b3e8b3 - docs/models/getagentversionrequest.md: - id: 4bf5feb4494a - last_write_checksum: sha1:d26546c2fdd78e0f52e2a2c50736b412ce814f6e - pristine_git_object: c98fee9d141f556520e16189e90234063e6861eb - docs/models/getbatchjobrequest.md: - id: 0c3a5debd663 - last_write_checksum: sha1:c186bbc6b04e1ed2db32f68fb22cb7eff4c1a90c - pristine_git_object: f3c67eb4a898a21e8a78c3340171458dcbd21d58 - docs/models/getconversationhistoryrequest.md: - id: 27de0e44ed80 - last_write_checksum: sha1:d89318332c87b5fa3bba22a52e209bdd5702b3f0 - pristine_git_object: fc90282bd9308a7531c3c532234fd332a223f243 - docs/models/getconversationmessagesrequest.md: - id: 82bf9b5c275b - last_write_checksum: sha1:cdbb0371c7a35e84f7938d28719acd843ebc15ce - pristine_git_object: fd037fea6c09d97bfb74332838a2b2760de4dccb - docs/models/getconversationrequest.md: - id: ad6c903380f6 - last_write_checksum: sha1:ee93a91d5daa01fc937dd09589b268bb2e42868a - pristine_git_object: 8a66a8b032cb67503c0f6b95c98e0a40b13d16ec - docs/models/getdocumentextractedtextsignedurlrequest.md: - id: d47f32212cf5 - last_write_checksum: sha1:7d695630988d5ab3773aabfe17c3fa9177d7e9c9 - pristine_git_object: ff703802ddfe0e36768daf87f4c5626028642370 - docs/models/getdocumentrequest.md: - id: 4208f9b571b3 - 
last_write_checksum: sha1:45f6807e2f7cd4c30f95304172cb556896571b76 - pristine_git_object: 29f62127b09511f14a065b9b6f6068e63643ab7c - docs/models/getdocumentsignedurlrequest.md: - id: 734960a10101 - last_write_checksum: sha1:04debc445e51e7d0f922bfe7873d639a844c17b4 - pristine_git_object: 72a179c086e38650afd81165575c7926d9566f69 - docs/models/getdocumentstatusrequest.md: - id: d0a69468ea34 - last_write_checksum: sha1:a8d91948737e4fa392221ec18970d27af90c203e - pristine_git_object: 3557d7738be21206061ef5806b79118432b33f26 - docs/models/getdocumenttextcontentrequest.md: - id: 6baa6485417b - last_write_checksum: sha1:5b47d1d8d5675e4b9f477c8034ef64afc912cd06 - pristine_git_object: 8593340139f28b44dfed455849198f5d5a457643 - docs/models/getfilesignedurlrequest.md: - id: c7b1953174af - last_write_checksum: sha1:d558115d1611827f461cc6a9f373885271c7a51d - pristine_git_object: 0be3b2888b0680d5a5fac0057cedc279d112ddb8 - docs/models/getfinetuningjobrequest.md: - id: c18796fe85f3 - last_write_checksum: sha1:8166520e2d657098131fd77c81a86099ed4d3486 - pristine_git_object: f20cb2148330c7078c6e93f55aa99f1b09086eaf - docs/models/getfinetuningjobresponse.md: - id: 8f50d4a61ae1 - last_write_checksum: sha1:509e8d190b43b5a4a3e0ae7d97bf2b4262fcd1f8 - pristine_git_object: 1b0568dd8019879ec2e1d0ff039296f600415e21 - docs/models/getlibraryrequest.md: - id: 9c9a9e6c4f03 - last_write_checksum: sha1:822494a821ee3a51a477f305c140ed39cd6465fc - pristine_git_object: 2a3acf50a6300ea3bcbc3b8432fe28cbef82c620 + docs/models/getfileresponse.md: + id: a983b3c8acd6 + last_write_checksum: sha1:5ca732ae5b384937473c04de6736fbab34deca24 + pristine_git_object: 0edd13e0818fc70c9c4db1e08b1490c1e146ea63 + docs/models/getsignedurlresponse.md: + id: 5539e5d7c3d4 + last_write_checksum: sha1:7198474f48bfba6d47326cd436e4a00a8ba70ce3 + pristine_git_object: bde693236406fe092f48c315e3b68a2fbbe6f9a4 + docs/models/githubrepository.md: + id: 66c120df624b + last_write_checksum: sha1:045e538dd7faffc1c6c6e6816563c5c8e776a276 + 
pristine_git_object: 827b6f34ae68ace7b8b4811764f59de2e0fcdd22 docs/models/githubrepositoryin.md: id: b42209ef8423 last_write_checksum: sha1:5ab33fc1b0b5513086b1cae07f416d502441db23 pristine_git_object: 241cf584d5e2425e46e065f47a18bea50fa624db - docs/models/githubrepositoryout.md: - id: 0ca86e122722 - last_write_checksum: sha1:0e3999cef8a745ae24ac36907b3431bc5103ea6f - pristine_git_object: fe38393a0cc2eb5c0b0c4690cb0c4e5e3ec41df8 - docs/models/httpvalidationerror.md: - id: a211c095f2ac - last_write_checksum: sha1:277a46811144643262651853dc6176d21b33573e - pristine_git_object: 712a148c3e2305dca4c702851865f9f8c8e674cc docs/models/hyperparameters.md: id: c167bad5b302 - last_write_checksum: sha1:5b7f76360dea58be5350bbe074482da45e57599c - pristine_git_object: 46a6dd6baa1b1574bad5eadc1e83d4b72d56c0c8 + last_write_checksum: sha1:e391cf72690e6cd01a2878081b8d87938e1c6639 + pristine_git_object: b6c00c3647d21789c92ad7d32dd29c3089ca134f + docs/models/imagedetail.md: + id: f8217529b496 + last_write_checksum: sha1:fdf19ac9459f64616240955cb81a84ef03e775c8 + pristine_git_object: 1e5ba3fd405a14e5e2872cc85504584dca19b726 docs/models/imagegenerationtool.md: id: d5deb6b06d28 - last_write_checksum: sha1:b3decee8fe7a824401f9afbd3544a69ccde4ef8e - pristine_git_object: 0c8de72cdd7149217010ae5d02777d1c5dd9896c + last_write_checksum: sha1:a1813ef99e4a0990fd073bb2311c475e88072029 + pristine_git_object: b476b6f2733a49767d7f7a4ad091fc321ab514f4 docs/models/imageurl.md: id: e75dd23cec1d - last_write_checksum: sha1:30131c77dd240c3bae48d9693698358e5cc0ae63 - pristine_git_object: 7c2bcbc36e99c3cf467d213d6a6a59d6300433d8 + last_write_checksum: sha1:a5cf621ce58a9cc7c96afa7de53367eac7b4cb0b + pristine_git_object: 6358e0acb2dea4816203413842243704ca955783 docs/models/imageurlchunk.md: id: 4407097bfff3 - last_write_checksum: sha1:73e14a0beccfc9465ee6d2990462e609903f5cd5 - pristine_git_object: 43078c7849fb3e808c2eaeaa5a3caeab2619d700 - docs/models/imageurlchunktype.md: - id: b9af2db9ff60 - 
last_write_checksum: sha1:990546f94648a09faf9d3ae55d7f6ee66de13e85 - pristine_git_object: 2064a0b405870313bd4b802a3b1988418ce8439e + last_write_checksum: sha1:da7a792f7b649f311062338dfbf3d25ff55fe6c5 + pristine_git_object: db0c53d22e29fa25222edb86b264e5135879a029 docs/models/imageurlunion.md: id: 9d3c691a9db0 last_write_checksum: sha1:4e32bcd7d44746d2ddbfafbef96152bb2bdb2a15 @@ -918,124 +862,184 @@ trackedFiles: pristine_git_object: b44a467d258cfa8cc3d2a3236330471dbc3af109 docs/models/inputs.md: id: 4b0a7fb87af8 - last_write_checksum: sha1:19d8da9624030a47a3285276c5893a0fc7609435 - pristine_git_object: 0f62a7ce8e965d0879507e98f808b9eb254282a6 - docs/models/inputsmessage.md: - id: 174dcada287d - last_write_checksum: sha1:92a95c1757e33603d1aa9ed6c9912d1c551d9974 - pristine_git_object: e3543fb4f9fff679b25f7f803eb2e8dabd56368f + last_write_checksum: sha1:c5f0c21c25fd5a698398a9e4ddf6261add60740c + pristine_git_object: d5771207d9725f04ca2ab1be692fc089360a58f4 docs/models/instructrequest.md: id: a0034d7349a2 last_write_checksum: sha1:34a81411110cbb7a099c45e482f5d1702ae48fd3 pristine_git_object: 5f0cdfff135fb72d3b1a81999a30b720c044e3d4 - docs/models/instructrequestinputs.md: - id: 2a677880e32a - last_write_checksum: sha1:64bcc6371d70446da60f167682504568d7f2618c - pristine_git_object: 931ae5e47df2d2850e3ef6740e2b89e1e0138297 docs/models/instructrequestmessage.md: id: 380503708a09 last_write_checksum: sha1:551b5d6dd3ba0b39cad32478213a9eb7549f0023 pristine_git_object: 57ed27ab3b1430514797dd0073bc87b31e5e3815 - docs/models/jobin.md: - id: 1b7b37214fa8 - last_write_checksum: sha1:0a241378cf3791c5c3fa733f30d45c07ef841448 - pristine_git_object: 62da90727898dd84f547c436c17fefa788e4f0d6 - docs/models/jobinintegration.md: - id: 200c505fa67f - last_write_checksum: sha1:c9887897357e01e6e228b48d6bf0c3fb4edd29f7 - pristine_git_object: 103820e7ec55769227610c385addbecfcd075cae - docs/models/jobinrepository.md: - id: 9ab1d5469c10 - last_write_checksum: 
sha1:1773f59546b94688d0be16d3f5f014cd86f5b1d7 - pristine_git_object: e873ae63f359d6ac4aca03b058a7c25fbbf2ba32 - docs/models/jobmetadataout.md: - id: 30eb634fe247 - last_write_checksum: sha1:46d54b6f6004a6e571afd5207db5170dfbce7081 - pristine_git_object: 6218a161b71abbb35eb4ca6e3ce664226983efc2 - docs/models/jobsout.md: - id: cbe31f43047d - last_write_checksum: sha1:4bd9ffbd2e5a286090167c795b9c3970e3c7d0a5 - pristine_git_object: 69f8342ac6f02a6e60d05b6f5b3cd892964fd3d7 - docs/models/jobsoutdata.md: - id: 809574cac86a - last_write_checksum: sha1:06455044d314c4edbd1ce4833d551c10918f0a3e - pristine_git_object: 28cec31117416b79eb8688d84b47b157974574cc + docs/models/jobmetadata.md: + id: 1f8e4c2f49e5 + last_write_checksum: sha1:a29ec10cd129b955672f60aaf526905780afe6b6 + pristine_git_object: 5d8a89ddc6b401a80e23d51cb378cdac5d4eb342 + docs/models/jobsapiroutesbatchcancelbatchjobrequest.md: + id: 798cb1ca1385 + last_write_checksum: sha1:67e8bda117608aee0e09a702a1ef8a4b03c40b68 + pristine_git_object: c19d0241784ff69bc68a11f405437400057d6f62 + docs/models/jobsapiroutesbatchgetbatchjobrequest.md: + id: e83a7ec84f8a + last_write_checksum: sha1:d661875832b4b9d5f545262216c9fcb9a77c8cd0 + pristine_git_object: 8c259bea9bef11f779fd609f1212565d574457e2 + docs/models/jobsapiroutesbatchgetbatchjobsrequest.md: + id: 5b9c44ad4d31 + last_write_checksum: sha1:1d7c05337b7cfe68f85a36576d060e1a890f9f96 + pristine_git_object: 5ceb0b2c40f079ffbe2cc4c82f6c3f94276980b4 + docs/models/jobsapiroutesfinetuningarchivefinetunedmodelrequest.md: + id: 8eb8c127091e + last_write_checksum: sha1:2b93a6bed5743461bb03c8337fb25dfc5a15522e + pristine_git_object: f9700df50b8f512c4139c1830aba18989d022b8e + docs/models/jobsapiroutesfinetuningcancelfinetuningjobrequest.md: + id: deff83b39b78 + last_write_checksum: sha1:dac8d8f2e95aed2db9b46711e6e80816881d5d14 + pristine_git_object: 883cbac685563d2e0959b63638f6b967ebdf1ee9 + docs/models/jobsapiroutesfinetuningcancelfinetuningjobresponse.md: + id: c45757ba1ed9 + 
last_write_checksum: sha1:4931469b58d454264f1e8d32df6a07d3f6f01022 + pristine_git_object: fb62eb62027c8151d597544fcaf27b972aeb78b3 + docs/models/jobsapiroutesfinetuningcreatefinetuningjobresponse.md: + id: 8aa8030f26d7 + last_write_checksum: sha1:4aada0d2297479d8276f5a422cb4dd6b56b1e176 + pristine_git_object: 7b52e2ca6365f17ac3b19d128937783d87c7fa37 + docs/models/jobsapiroutesfinetuninggetfinetuningjobrequest.md: + id: a9b75762e534 + last_write_checksum: sha1:8f1395447928e089c88dce8c0ced1030ec5f0eba + pristine_git_object: fde19800303a901149bf39c5330ef8c4da87df62 + docs/models/jobsapiroutesfinetuninggetfinetuningjobresponse.md: + id: c0b31f4fc621 + last_write_checksum: sha1:4ceb9df28082bf5d496cd222a0f45dc81a576367 + pristine_git_object: f770532776a13860e697da7478d1677d16f0ec36 + docs/models/jobsapiroutesfinetuninggetfinetuningjobsrequest.md: + id: 52078f097503 + last_write_checksum: sha1:fc134fdc7e229b8df373b77096c8299c214171a7 + pristine_git_object: 23c52c342358ea889b25ee7b18b381b68519c6cf + docs/models/jobsapiroutesfinetuninggetfinetuningjobsstatus.md: + id: 8545ffb587d6 + last_write_checksum: sha1:bbc08ca53c2da180b96ed0347cf4954410c79311 + pristine_git_object: 40d57686aec11d9bdc4c116ea4c98183e0a6414c + docs/models/jobsapiroutesfinetuningstartfinetuningjobrequest.md: + id: b4e2b814d8c3 + last_write_checksum: sha1:f13b5c8f2e74cc73b58a30d366032c764603f95e + pristine_git_object: 4429fe480ab9486de98940a119ac63f40045313b + docs/models/jobsapiroutesfinetuningstartfinetuningjobresponse.md: + id: cfd848845787 + last_write_checksum: sha1:a165279fa0c9e051458ea4333dfdd31ef0440426 + pristine_git_object: 1a7e71d4479369f13c391a9782278557bc4531ae + docs/models/jobsapiroutesfinetuningunarchivefinetunedmodelrequest.md: + id: 75b5dd1bcbaa + last_write_checksum: sha1:dd30e7ff8748d26497458f3398c0547113dc058f + pristine_git_object: 95c1734daa7164bedeeb1fa58dd792939f25bc17 + docs/models/jobsapiroutesfinetuningupdatefinetunedmodelrequest.md: + id: 60bd2e28993a + last_write_checksum: 
sha1:58835c28cccaf90e99bbb72bf7c5a5ce42498824 + pristine_git_object: dbe49a86ca2bf64901133fd58a342d30909c35b2 + docs/models/jobsapiroutesfinetuningupdatefinetunedmodelresponse.md: + id: c265a30fd4cf + last_write_checksum: sha1:410c62a884aae902cdfbfcab33779e62487de13b + pristine_git_object: f40350bf9d74d09ca3a2ec6d91d9068bda631ef5 docs/models/jsonschema.md: id: a6b15ed6fac8 last_write_checksum: sha1:523465666ad3c292252b3fe60f345c7ffb29053f pristine_git_object: 7ff7c070353c58290416aff5b01d1dfc43905269 - docs/models/legacyjobmetadataout.md: - id: b3b8c262f61a - last_write_checksum: sha1:d8c4e7525e2dc2f4d29bfeb6cadc648fab1c62c7 - pristine_git_object: 8a712140fbf3c36f4bd9686e135b70d8688aa9c1 - docs/models/libraryin.md: - id: a08170e6397c - last_write_checksum: sha1:2c996ecf1ae5d9e8df702a79741b72b3571eb6ef - pristine_git_object: d6b119148725627bcf76594c4a24e915399cd8f8 - docs/models/libraryinupdate.md: - id: 6d06b6b21498 - last_write_checksum: sha1:4ec01d7f7e24f58a74613d4847725bfd516b7d7f - pristine_git_object: 4aa169c7669c00fcedc423fbff6f386697360787 - docs/models/libraryout.md: - id: 2e8b6d91ded2 - last_write_checksum: sha1:d71053b44725147265871be445217e3e1a0e5ede - pristine_git_object: ebf46d57de6bad7022a3e8cb8eaf88728bbbe888 - docs/models/listagentaliasesrequest.md: - id: 495659b2d40a - last_write_checksum: sha1:637e7e0e8deadcf2e77cc9469727010f90f0ad79 - pristine_git_object: b3570cb80d484dadaf2a138c70bbb477746ba416 - docs/models/listagentsrequest.md: - id: aeb9bbc163f5 - last_write_checksum: sha1:86c5f5068061b79d2e582e4dd9a8b0ed4c84cbcf - pristine_git_object: 79aec3ea6e3506797fc96a7ca9d7393543270866 - docs/models/listagentversionsrequest.md: - id: 3270f6dd4107 - last_write_checksum: sha1:14ffb20c5c48cca371ed27f6a6a8b565cd4a5565 - pristine_git_object: ba8ddaa5cb4c94623b29a1f635f38a04cc0ff497 - docs/models/listbatchjobsrequest.md: - id: e2a0b1528191 - last_write_checksum: sha1:01a587ec7cc6e183d47e106eb809e7c1e9e79e39 - pristine_git_object: 
19981b2425254058bd24b218d1f7881fc3635c89 - docs/models/listconversationsrequest.md: - id: 6c0961051703 - last_write_checksum: sha1:453eb480cd48330f857b4c80210b6753a750348d - pristine_git_object: d99b420834b17f3f5b7fac630af7a7b0d2db341d - docs/models/listconversationsresponse.md: - id: 65075f5cf00c - last_write_checksum: sha1:8478c55b156c09f2b714d2854030a04494b48f7c - pristine_git_object: 9d611c553b245657181c06d7f65acaa9d8128556 - docs/models/listdocumentout.md: - id: 4bec19e96c34 - last_write_checksum: sha1:c0b3a6e3841f120c52b1d7718d7226a52fe1b6d6 - pristine_git_object: f14157b8db55c1201d9f7151742e9ddf0d191c16 - docs/models/listdocumentsrequest.md: - id: 36c8a1116534 - last_write_checksum: sha1:390849ce3d93a64c505b7b2f7cae411766a5e44b - pristine_git_object: 369e8edbe471dd5167ad1baf74ee5b00eb7d5043 - docs/models/listfilesout.md: - id: 98d4c59cc07e - last_write_checksum: sha1:e76df31628984095f1123005009ddc4b59b1c2bc - pristine_git_object: bcb1f13aa17f41dadb6af37541e929364e2d6cec - docs/models/listfilesrequest.md: - id: 70edaf3759f0 - last_write_checksum: sha1:686edbd5134dfe60cfd98221ec78d296a8429d28 - pristine_git_object: 2d76a76b011603e3a7c4b4932ef4b26def1cb792 - docs/models/listfinetuningjobsrequest.md: - id: 41878563fe80 - last_write_checksum: sha1:103cd0d3c5334ea60a6c6e1c2585bf9bd493c78f - pristine_git_object: 3a04fc709c2a12cc4f414701efcaec4584b7d6df - docs/models/listfinetuningjobsstatus.md: - id: 1d6d54dc70ea - last_write_checksum: sha1:c4f69e2b2b5aac719281d264722f2cba6aa048a0 - pristine_git_object: 07db9ae5d87b7192ada4843d4fe0d3e8573794c6 - docs/models/listlibraryaccessesrequest.md: - id: 0b387463f914 - last_write_checksum: sha1:2912e1fc3ee179f01fde7a21501e2501debecc2c - pristine_git_object: d98bcda22bbb2540a525f2ce1516a637446b0a0f - docs/models/listlibraryout.md: - id: ea34f8548bd6 - last_write_checksum: sha1:cec920357bc48bea286c05d16c480a9a9369b459 - pristine_git_object: db76ffa10eb97f143ad4a6930e520e389fe18153 + docs/models/legacyjobmetadata.md: + id: 
50ac14d9b270 + last_write_checksum: sha1:ebe37a176ca318e797fee7ebf4eef73fb9938a12 + pristine_git_object: 4705ab4f67e10b8e2cbfc86b29c03a9945aeb8fb + docs/models/librariesdeletev1request.md: + id: c0c3b2e1aabc + last_write_checksum: sha1:bef84f8851b06d2d914b605f11109de1850d0294 + pristine_git_object: 68d7e54369ce75422bf8b0ff16cada1c0ae2b05c + docs/models/librariesdocumentsdeletev1request.md: + id: 9d557bd7d1cc + last_write_checksum: sha1:1b580b657559356886915ee5579b90a03db19337 + pristine_git_object: efccdb1bbc36cf644ed2d1716cbd202e6d6bf6c5 + docs/models/librariesdocumentsgetextractedtextsignedurlv1request.md: + id: 27ad38ce4cb1 + last_write_checksum: sha1:b35ad610330232b395b5f87cc15f6ae270de6816 + pristine_git_object: 14ca66f72693f1df05eb93e0cca45f440b62d282 + docs/models/librariesdocumentsgetsignedurlv1request.md: + id: 4498715b6cfb + last_write_checksum: sha1:31f78079e31e070d080c99555cd2d85318fc4610 + pristine_git_object: 7c08c180d59a8e8475fea89424b8b2021d51385f + docs/models/librariesdocumentsgetstatusv1request.md: + id: c2219d3a3738 + last_write_checksum: sha1:44e79df94cf2686e83d7a2e793140a6a7b3a1c05 + pristine_git_object: e6d41875966348fd9e770d06c8099e48f0e64b5d + docs/models/librariesdocumentsgettextcontentv1request.md: + id: 850dfa465952 + last_write_checksum: sha1:4a1212e111525f4265d2924ce52f9c13d2787d4d + pristine_git_object: 2f58a4460ccdad531391318c62191e76c1ec22ac + docs/models/librariesdocumentsgetv1request.md: + id: cdd0df2f7e9d + last_write_checksum: sha1:36e5ef39552159044ecd28d20ee0792ea5bcadef + pristine_git_object: 6febc058425bb38857c391ee4c40d600858e6058 + docs/models/librariesdocumentslistv1request.md: + id: 7b5756e50d64 + last_write_checksum: sha1:2605b7972a3d7b4f73ab8052be4bf740f44f6f6f + pristine_git_object: 44f6300115853053214639982516a60b3268e778 + docs/models/librariesdocumentsreprocessv1request.md: + id: 1b8bf57b3f0a + last_write_checksum: sha1:8528785c1b4ae18d6ec6f261d29d5daac0d420a3 + pristine_git_object: 
196ba17b749ce9efc1c30189864e474896814f85 + docs/models/librariesdocumentsupdatev1request.md: + id: b9147b1c0e38 + last_write_checksum: sha1:ed3ae7761990bd26a4bf99cd4641822eb90d3d57 + pristine_git_object: d46308509330099e30a53dddad51da8a6186aa92 + docs/models/librariesdocumentsuploadv1request.md: + id: 89a89d889c72 + last_write_checksum: sha1:32294a87d8a0b173b4d6f12b607a1bb3da765776 + pristine_git_object: 172a6183f31eec3142a84637414484799c2a4677 + docs/models/librariesgetv1request.md: + id: f47ad71ec7ca + last_write_checksum: sha1:3b2bf1e4f6069d0c954e1ebf95b575a32c4adeac + pristine_git_object: 6e1e04c39c15a85d96710f8d3a8ed11a22412816 + docs/models/librariessharecreatev1request.md: + id: 99e7bb8f7fed + last_write_checksum: sha1:e40d710ad1023768a0574b3283ef35544f6b0088 + pristine_git_object: 4c05241de4ee5a76df335ae9ea71004bd02b8669 + docs/models/librariessharedeletev1request.md: + id: bc8adba83f39 + last_write_checksum: sha1:79fc5a9a3cee5b060f29edd95f00e0fea32579cf + pristine_git_object: 850e22ab79863ba544f453138322c0eb5bf544cd + docs/models/librariessharelistv1request.md: + id: 86e6f08565e2 + last_write_checksum: sha1:6f2ffff66fa5fb141d930bca7bb56e978d62b4a5 + pristine_git_object: 98bf6d17ab013c1dd3f0ab18c37bbfc1a63f1b76 + docs/models/librariesupdatev1request.md: + id: f7e51b528406 + last_write_checksum: sha1:6a33b0161702ecc335dd2859df1bbc05b73702a9 + pristine_git_object: c5c142db7aaa49990135c21eabde43b8c0fdf756 + docs/models/library.md: + id: e8ec114dd107 + last_write_checksum: sha1:a4d6e9a777ce3d63aac24432281933ce6e13b4a9 + pristine_git_object: 4319f43df922b4924a11d494002826cb8d6dea0b + docs/models/listbatchjobsresponse.md: + id: e03025d58630 + last_write_checksum: sha1:de42c9396546fc8487d0bd6ed15b4076599fa83f + pristine_git_object: c23e32201d12a2594f97a493f63b2b7b42b9e337 + docs/models/listdocumentsresponse.md: + id: f2091cee0405 + last_write_checksum: sha1:335d0ccd3a448e65739d5a0cfa2c67614daec031 + pristine_git_object: 47b9d3b73fdc85bf6e463c91790faf346df56664 + 
docs/models/listfilesresponse.md: + id: b15df90d2d59 + last_write_checksum: sha1:4840f26427acf8846a9f1e48136f0663c6e4cd87 + pristine_git_object: 802f685fb3a76afb86a69cf41e6de9339cd6fbc7 + docs/models/listfinetuningjobsresponse.md: + id: d04e4dfddf78 + last_write_checksum: sha1:cebaf361aa10f1f6c4299c3c8a34f32d301455ad + pristine_git_object: 00251242023e2161747ebf00b4c2959909e2b654 + docs/models/listfinetuningjobsresponsedata.md: + id: 59c80de4086d + last_write_checksum: sha1:5a0d91c251b4b9283895d9f19f7b9416f93d4468 + pristine_git_object: adb0644475841c6a4686e8c42790dd44eed43dc1 + docs/models/listlibrariesresponse.md: + id: 87e3bec10745 + last_write_checksum: sha1:00522e685ec71a54f5f272d66b82e650848eaf36 + pristine_git_object: e21b9ced628f6fd5ae891d4a46666ebc94546859 docs/models/listsharingout.md: id: a3249129f37e last_write_checksum: sha1:4831e4f02e1d5e86f138c7bb6b04d095aa4df30f @@ -1050,68 +1054,40 @@ trackedFiles: pristine_git_object: 76256fb913376a15d5bcd2531b18f1a78b980c9d docs/models/messageinputcontentchunks.md: id: 34aac9c271db - last_write_checksum: sha1:641cd1dba3721f85b049c5ee514879f067483949 - pristine_git_object: 4fd18a0dcb4f6af4a9c3956116f8958dc2fa78d1 + last_write_checksum: sha1:d8ffdfd8b5458497e2cb6a32f52900c3ca2a6ddf + pristine_git_object: 0561785082c741f39f930ab7ded5b6c6a9ade6ad docs/models/messageinputentry.md: id: eb74af2b9341 - last_write_checksum: sha1:07124339ecb87e31df5f0e2f887e23209dd269af - pristine_git_object: 52183a32330b3e0bf91a1bd5e541dfda12d3f1a0 + last_write_checksum: sha1:c91bfdf9426c51236b6ff33d127dbe62b051a9da + pristine_git_object: f8514fb3305dbe1df91db8d622cc33a753b63623 docs/models/messageinputentrycontent.md: id: 7e12c6be6913 last_write_checksum: sha1:6be8be0ebea2b93712ff6273c776ed3c6bc40f9a pristine_git_object: 65e55d97606cf6f3119b7b297074587e88d3d01e - docs/models/messageinputentryobject.md: - id: 9a1d0d31f357 - last_write_checksum: sha1:7746753005fda37834a73e62bf459eacb740ba5b - pristine_git_object: 
6bdd62e27d7353dbb7d521ad02bde358496ab108 - docs/models/messageinputentryrole.md: - id: 2497d07a793d - last_write_checksum: sha1:a41eb58f853f25489d8c00f7a9595f443dcca2e6 - pristine_git_object: f2fdc71d8bc818b18209cd1834d4fead4dfd3ba6 - docs/models/messageinputentrytype.md: - id: 5d2a466dad0f - last_write_checksum: sha1:19f689ffdd647f3ddc747daf6cb0b4e811dfdcee - pristine_git_object: d3378124db83c92174e28fe36907263e2cbe6938 docs/models/messageoutputcontentchunks.md: id: 802048198dc0 - last_write_checksum: sha1:d70a638af21ee46126aa0434bf2d66c8dd8e43ff - pristine_git_object: d9c3d50e295b50618f106ef5f6b40929a28164df + last_write_checksum: sha1:8cf4e4ea6b6988e22c117d8f689bbfb0869816ad + pristine_git_object: c4a7777e7675ebf2384311ec82b2713da69e5900 docs/models/messageoutputentry.md: id: f969119c8134 - last_write_checksum: sha1:cf5032929394584a31b3f12f55dfce6f665f71c7 - pristine_git_object: 5b42e20d1b03263f3d4d9f5cefe6c8d49c984e01 + last_write_checksum: sha1:f50b955cd622a6160c0ada34b0e14bff612802b7 + pristine_git_object: 73a1c666acc913b96d65a124612c4a728882bbc9 docs/models/messageoutputentrycontent.md: id: 44019e6e5698 last_write_checksum: sha1:d0cc7a8ebe649614c8763aaadbf03624bb9e47e3 pristine_git_object: 5206e4eb0d95e10b46c91f9f26ae00407d2dd337 - docs/models/messageoutputentryobject.md: - id: b3a7567581df - last_write_checksum: sha1:46528a6f87408c6113d689f2243eddf84bcbc55f - pristine_git_object: bb254c82737007516398287ff7878406866dceeb - docs/models/messageoutputentryrole.md: - id: bf7aafcdddab - last_write_checksum: sha1:e28643b6183866b2759401f7ebf849d4848abb10 - pristine_git_object: 783ee0aae4625f7b6e2ca701ac8fcdddcfe0e412 - docs/models/messageoutputentrytype.md: - id: 960cecf5fde3 - last_write_checksum: sha1:b6e52e971b6eb69582162a7d96979cacff6f5a9c - pristine_git_object: cb4a7a1b15d44a465dbfbd7fe319b8dbc0b62406 docs/models/messageoutputevent.md: id: b690693fa806 - last_write_checksum: sha1:d6538a4b5d5721c09bc196f3e9523ed45dafbea7 - pristine_git_object: 
b0fa1a2d369c89ec75f43c6b31ff52b0d80d9b1c + last_write_checksum: sha1:a4157c087ff95fa9445757c9d363615718156164 + pristine_git_object: e09a965f7d4cc35d6b120ba5555d96ba7b3e8a27 docs/models/messageoutputeventcontent.md: id: cecea075d823 last_write_checksum: sha1:16dac25382642cf2614e24cb8dcef6538be34914 pristine_git_object: 16d8d52f6ff9f43798a94e96c5219314731ab5fb - docs/models/messageoutputeventrole.md: - id: 87d07815e9be - last_write_checksum: sha1:a6db79edc1bf2d7d0f4762653c8d7860cb86e300 - pristine_git_object: e38c6472e577e0f1686e22dc61d589fdb2928434 - docs/models/metricout.md: - id: 7c6ff0ad95f9 - last_write_checksum: sha1:eef34dc522a351e23d7371c00a07662a0711ea73 - pristine_git_object: 3c552bac2fa3a5a3783db994d47d255a94643110 + docs/models/metric.md: + id: a812a3e37338 + last_write_checksum: sha1:14016848dcfaba90014b482634ed6d5715caa860 + pristine_git_object: 7f86303651650177ece51b82d867cab858e830ae docs/models/mistralpromptmode.md: id: d17d5db4d3b6 last_write_checksum: sha1:abcb7205c5086169c7d9449d15ac142448a7d258 @@ -1122,12 +1098,8 @@ trackedFiles: pristine_git_object: c7dd2710011451c2db15f53ebc659770e786c4ca docs/models/modelconversation.md: id: 497521ee9bd6 - last_write_checksum: sha1:440c9e7c306f20bd4f4b27ab0cf770d3bf8762e2 - pristine_git_object: 813e1f3a79ad14eae55bbb1b96598d6260904d9d - docs/models/modelconversationobject.md: - id: 4c5699d157a9 - last_write_checksum: sha1:8e2e82e1fa4cb97f8c7a8a129b3cc9cd651e4055 - pristine_git_object: ead1fa26f5d9641a198a14b43a0f5689456e5821 + last_write_checksum: sha1:22a8d7502eeaf176fbd1c7b22b512b4f9e4e043f + pristine_git_object: af2e5c6149339a561b03b954cd0e71f9d9aeffd6 docs/models/modelconversationtool.md: id: 2dd28167bc36 last_write_checksum: sha1:9b33f73330e5ae31de877a904954efe342e99c4f @@ -1166,8 +1138,8 @@ trackedFiles: pristine_git_object: 02473d44f73485fd7b7f0031d51bfac835d4036e docs/models/ocrrequest.md: id: 6862a3fc2d0f - last_write_checksum: sha1:9311e2c87f8f4512c35a717d3b063f2861f878d4 - pristine_git_object: 
87929e53f8a74823b82ecce56d15f22228134fa6 + last_write_checksum: sha1:2faa819df648d330074c177d8f5d4a9c9a27bc90 + pristine_git_object: dd3fc2ea28cc2bc147473ba9f73aa32a9528632a docs/models/ocrresponse.md: id: 30042328fb78 last_write_checksum: sha1:8e4a4ae404ea752f3e9f1108c2a5f89ed6cfb143 @@ -1190,8 +1162,8 @@ trackedFiles: pristine_git_object: d0ee0db93f56c40f6684fcfdb5873aba586bc876 docs/models/outputcontentchunks.md: id: f7e175c8e002 - last_write_checksum: sha1:5094466110028801726cc825e8809f524fe1ee24 - pristine_git_object: c76bc31d4d8791b7bef4dc6cbff6671b38a7927d + last_write_checksum: sha1:5adb0733a8ca9b224155dfef66dfb37b7f416972 + pristine_git_object: e5185014faa41b6e6d1567d713fc390f551fad01 docs/models/paginationinfo.md: id: 3d2b61cbbf88 last_write_checksum: sha1:1da38e172024fe703f3180ea3c6ec91fe3c51ed0 @@ -1216,10 +1188,22 @@ trackedFiles: id: d25137243bef last_write_checksum: sha1:f8c3a4984d647d64e8ea4e1e42654265ffe46b0f pristine_git_object: da3764ef56337bdc773eaf8e9aa747cbd1b407e2 + docs/models/realtimetranscriptioninputaudioappend.md: + id: fa2aa317d1ca + last_write_checksum: sha1:59cce0828505fdb55104cd3144b75334e0f31050 + pristine_git_object: 5ee365eb9a993933509ac4666bcec24bfcc6fccd + docs/models/realtimetranscriptioninputaudioend.md: + id: 11045f9cc039 + last_write_checksum: sha1:945ca0475826294e13aba409f3ae2c2fc49b1b67 + pristine_git_object: 393d208c6e242959161f4436d53cf4aa2df69a92 + docs/models/realtimetranscriptioninputaudioflush.md: + id: c2f2258e0746 + last_write_checksum: sha1:a4e6d160da44c6f57b01059f7198208702e9b06a + pristine_git_object: 367725baa278935a6a282338ca7f2a23895a86d8 docs/models/realtimetranscriptionsession.md: id: aeb0a0f87d6f - last_write_checksum: sha1:c3aa4050d9cc1b73df8496760f1c723d16183f3a - pristine_git_object: 94a0a89e8ca03866f8b09202a28c4e0f7c3af2e6 + last_write_checksum: sha1:d72bf67442ac5e99f194c429e96a504685f02efb + pristine_git_object: 750bd7f79b65666812c6207d7085b9437c49517d 
docs/models/realtimetranscriptionsessioncreated.md: id: aa2ae26192d6 last_write_checksum: sha1:d13fec916d05300c86b52e951e81b1ceee230634 @@ -1228,26 +1212,26 @@ trackedFiles: id: 56ce3ae7e208 last_write_checksum: sha1:833db566b2c8a6839b43cb4e760f2af53a2d7f57 pristine_git_object: 7e2719957aae390ee18b699e61fbc7581242942f + docs/models/realtimetranscriptionsessionupdatemessage.md: + id: 02a5eee40cdd + last_write_checksum: sha1:44f8e6bc8f8cd4087a7e86c85db5141fab90f78d + pristine_git_object: 2a50ca92720bad6605bdeafd83b43d0e8bf40615 + docs/models/realtimetranscriptionsessionupdatepayload.md: + id: 3ddd5a95510a + last_write_checksum: sha1:33bca4d547ca812d55ac49bf7b17851b2fecfc80 + pristine_git_object: d6c6547d7895e53be15a0cce46b6524178acc3bc docs/models/referencechunk.md: id: 07895f9debfd - last_write_checksum: sha1:97d01dd2b907e87b58bebd9c950e1bef29747c89 - pristine_git_object: a132ca2fe6fbbaca644491cbc36d88b0c67cc6bc - docs/models/referencechunktype.md: - id: 0944b80ea9c8 - last_write_checksum: sha1:956b270766c7f11fe99f4a9b484cc29c159e7471 - pristine_git_object: 1e0e2fe64883ef5f3e628777b261b1224661d257 - docs/models/reprocessdocumentrequest.md: - id: 3c713aad474b - last_write_checksum: sha1:100b194196051470a2ae75cc2f707afec0c8d161 - pristine_git_object: cf3982a8cd76e4b2c8429acede0a12a044cbe2ca + last_write_checksum: sha1:4384049375a2566c7567599f97ce1ec19e9f6276 + pristine_git_object: d847e24845a399c7ca93d54701832fb65e01b3ab docs/models/requestsource.md: id: 8857ab6025c4 last_write_checksum: sha1:4b7ecc7c5327c74e46e2b98bd6e3814935cdecdf pristine_git_object: c81c115992439350d56c91d2e3351a13df40676b docs/models/response.md: id: 583c991c7a30 - last_write_checksum: sha1:f4a3ec06ff53cd1cbdf892ff7152d39fa1746821 - pristine_git_object: 3512b7a8f9fdfcaaed9a6db06ef4266629d9fa89 + last_write_checksum: sha1:0791cb4aa4045708ab64d42bf67bd6ab74bc7752 + pristine_git_object: ff67925758959b87992b47a1a32c224eeeb599e3 docs/models/responsedoneevent.md: id: 38c38c3c065b last_write_checksum: 
sha1:4ac3a0fd91d5ebaccce7f4098ae416b56e08416f @@ -1276,26 +1260,14 @@ trackedFiles: id: 48d4a45780a9 last_write_checksum: sha1:8e75db359f0d640a27498d20c2ea6d561c318d7e pristine_git_object: 844c5d610a9a351532d12b1a73f6c660059da76b - docs/models/restartconversationrequest.md: - id: b85b069aa827 - last_write_checksum: sha1:b7fb56a5561ab329f605d77795a610da8faaf561 - pristine_git_object: f24f14e67e749da884363038ca72891449cd99da - docs/models/restartconversationstreamrequest.md: - id: 65df276279f0 - last_write_checksum: sha1:907807c7e5969f82e70e743fddeb4c6f4278fc1a - pristine_git_object: daa661a9250701ad33241084d5033f73d75a9d6e - docs/models/retrievefileout.md: - id: 8e82ae08d9b5 - last_write_checksum: sha1:600d5ea4f75dab07fb1139112962affcf633a6c9 - pristine_git_object: 28f97dd25718833aaa42c361337e5e60488bcdc8 - docs/models/retrievefilerequest.md: - id: eac92ea7ca45 - last_write_checksum: sha1:c80772e3cfbe704385abe1b347d8e69d55bd9e00 - pristine_git_object: 454b9665b8134876488eb32c57a9dc45f4d972de - docs/models/retrievemodelrequest.md: - id: 392008b3324b - last_write_checksum: sha1:b9aafe10f0cd838a0b6959ec8dde5850ce59c55d - pristine_git_object: 787c3dd1000cba873c787fd5b9dcbe3c793f2b11 + docs/models/retrievemodelv1modelsmodelidgetrequest.md: + id: ac567924689c + last_write_checksum: sha1:7534c5ec5f1ae1e750c8f610f81f2106587e81a9 + pristine_git_object: f1280f8862e9d3212a5cfccd9453884b4055710a + docs/models/role.md: + id: b694540a5b1e + last_write_checksum: sha1:c7ef39a81299f3156b701420ef634a8b4fab76f0 + pristine_git_object: 853c6257d9bdb4eda9cb37e677d35ab477dca812 docs/models/sampletype.md: id: 0e09775cd9d3 last_write_checksum: sha1:33cef5c5b097ab7a9cd6232fe3f7bca65cd1187a @@ -1328,18 +1300,10 @@ trackedFiles: id: 6a902241137c last_write_checksum: sha1:567027284c7572c0fa24132cd119e956386ff9d0 pristine_git_object: ae06b5e870d31b10f17224c99af1628a7252bbc3 - docs/models/startfinetuningjobrequest.md: - id: 48fd313ae362 - last_write_checksum: 
sha1:f645c1e3e3244729eaa31aabb4b3ec0454fb114f - pristine_git_object: 9df5aee8f527fea4f0c9b02a28af77a65765be48 - docs/models/startfinetuningjobresponse.md: - id: 970045c710ff - last_write_checksum: sha1:78d230946abe19e928f286562ac589c7672c9854 - pristine_git_object: dce84c5a7711cd655a624b6ba0540504a6ff75d7 docs/models/systemmessage.md: id: fdb7963e1cdf - last_write_checksum: sha1:561c3372391e093c890f477b3213c308ead50b81 - pristine_git_object: dfb0cd0bd17aecbc1fe4b8410e78440f65038fef + last_write_checksum: sha1:c7603c5ce77ba2bcbda9eff65eeafdb1e9ecbec7 + pristine_git_object: 10bda10f921fb5d66c1606ff18e654b4e78ab197 docs/models/systemmessagecontent.md: id: 94a56febaeda last_write_checksum: sha1:6cb10b4b860b4204df57a29c650c85c826395aeb @@ -1354,24 +1318,16 @@ trackedFiles: pristine_git_object: 54f029b814fdcfa2e93e2b8b0594ef9e4eab792a docs/models/textchunk.md: id: 6cd12e0ef110 - last_write_checksum: sha1:f04818ca76e68b3d3684927e4032d5d7de882f6a - pristine_git_object: d488cb51abeb4913c8441d9fbe9e5b964099bb7e - docs/models/textchunktype.md: - id: 886e88ebde41 - last_write_checksum: sha1:ba8db2a3910d1c8af424930c01ecc44889335bd3 - pristine_git_object: e2a2ae8bcdf8a35ad580a7de6271a5d26cd19504 + last_write_checksum: sha1:d9fe94c670c5e0578212752c11a0c405a9da8518 + pristine_git_object: df0e61c32bc93ef17dbba50d026edace139fee6a docs/models/thinkchunk.md: id: bca24d7153f6 - last_write_checksum: sha1:feb95a931bb9cdbfe28ab351618687e513cf830b - pristine_git_object: 66b2e0cde70e25e2927180d2e709503401fddeab - docs/models/thinkchunktype.md: - id: 0fbeed985341 - last_write_checksum: sha1:790f991f95c86c26a6abb9c9c5debda8b53526f5 - pristine_git_object: baf6f755252d027295be082b53ecf80555039414 - docs/models/thinking.md: - id: 07234f8dd364 - last_write_checksum: sha1:a5962d1615b57996730da19e59fbfaa684321442 - pristine_git_object: c7a0d5c9811ea37aaf9e16b6e93c833ab979573f + last_write_checksum: sha1:0f861f1653035dea2018be9a977c15f54add9531 + pristine_git_object: 
70c0369f16465e1b1f5f46e8cd799e5db536cdde + docs/models/thinkchunkthinking.md: + id: 22de7b5060fb + last_write_checksum: sha1:5e0722b8d513b38d60fbfe28635bdea40b951593 + pristine_git_object: dd1ecca12b5cda76a51b1e13335f1757a9dd7a68 docs/models/timestampgranularity.md: id: eb4d5a8e6f08 last_write_checksum: sha1:e256a5e8c6010d500841295b89d88d0eface3b88 @@ -1384,6 +1340,10 @@ trackedFiles: id: 80892ea1a051 last_write_checksum: sha1:cb27b9d36cfe6227978c7a7a01b1349b6bac99d9 pristine_git_object: 3819236b9f3eee2f6878818cfbbe2817e97f7de2 + docs/models/toolcallconfirmation.md: + id: 944eebb142ff + last_write_checksum: sha1:864ccb39a00094d965b764235e74709945abca3d + pristine_git_object: 1812f7d687d83f5692d9e79709e56813ab2c79b1 docs/models/toolchoice.md: id: "097076343426" last_write_checksum: sha1:25b33b34da02c3b46349dc8b6223f9ae18370d16 @@ -1392,6 +1352,10 @@ trackedFiles: id: 15410de51ffc last_write_checksum: sha1:ca0cf9bf128bebc8faedd9333cc6a56b30f58130 pristine_git_object: 0be3d6c54b13a8bf30773398a2c12e0d30d3ae58 + docs/models/toolconfiguration.md: + id: 06bfa2c4e662 + last_write_checksum: sha1:9b619977375f228c76f09d48d6e2833add6c07e2 + pristine_git_object: 89286a172124ce3473bcb081de6e4db8c95afefa docs/models/toolexecutiondeltaevent.md: id: f2fc876ef7c6 last_write_checksum: sha1:ae1462a9b5cb56002b41f477ce262cb64ccf2f4e @@ -1410,60 +1374,44 @@ trackedFiles: pristine_git_object: 6449079d7b467796355e3353f4245046cced17e8 docs/models/toolexecutionentry.md: id: 75a7560ab96e - last_write_checksum: sha1:fdaa9abd5417486100ffc7059fcfdc8532935ed3 - pristine_git_object: adf88fb1acec13bf8016eb42d6bdc5fd3bd279b5 + last_write_checksum: sha1:668d8fbc59bc729bf4b1d95d2f2bfe4097701c0e + pristine_git_object: 03316381b130cf02751b10fef4129c8f23072b76 docs/models/toolexecutionentryname.md: id: 86d537762559 last_write_checksum: sha1:6c528cdfbb3f2f7dc41d11f57c86676f689b8845 pristine_git_object: fb762a5382d8b0e93dc2eb277f18adf810057c55 - docs/models/toolexecutionentryobject.md: - id: af106f91001f - 
last_write_checksum: sha1:6df075bee4e84edf9b57fcf62f27b22a4e7700f4 - pristine_git_object: 0ca79af56d60094099c8830f638a748a92a40f21 - docs/models/toolexecutionentrytype.md: - id: b61e79a59610 - last_write_checksum: sha1:b0485bae901e14117f76b8e16fe80023a0913787 - pristine_git_object: a67629b8bdefe59d188969a2b78fa409ffeedb2a docs/models/toolexecutionstartedevent.md: id: 37657383654d - last_write_checksum: sha1:47126a25c2a93583038ff877b85fc9ae1dcef9f3 - pristine_git_object: c41c7258779f15f1f0436ad890f4947d780bfa75 + last_write_checksum: sha1:5a020d24bdeb4eb9976ce93a8daa91947026bde9 + pristine_git_object: 189b8a3d3b22d73000850a3f1a95b85e358c2090 docs/models/toolexecutionstartedeventname.md: id: be6b33417678 last_write_checksum: sha1:f8857baa02607b0a0da8d96d130f1cb765e3d364 pristine_git_object: 3308c483bab521f7fa987a62ebd0ad9cec562c3a docs/models/toolfilechunk.md: id: 67347e2bef90 - last_write_checksum: sha1:0a499d354a4758cd8cf06b0035bca105ed29a01b - pristine_git_object: a3ffaa2b8339ae3a090a6a033b022db61a75125b + last_write_checksum: sha1:2e4c6ce703733c02e62467507c231033716fdb92 + pristine_git_object: d60021755729f1a2870e24a500b3220c8f1fc6e3 docs/models/toolfilechunktool.md: id: eafe1cfd7437 last_write_checksum: sha1:73a31dbff0851612f1e03d8fac3dbbee77af2df0 pristine_git_object: aa5ac8a99a33d8c511f3d08de93e693bf75fb2a1 - docs/models/toolfilechunktype.md: - id: f895006e53e4 - last_write_checksum: sha1:258a55eef5646f4bf20a150ee0c48780bdddcd19 - pristine_git_object: 7e99acefff265f616b576a90a5f0484add92bffb docs/models/toolmessage.md: id: 0553747c37a1 - last_write_checksum: sha1:f35fa287b94d2c1a9de46c2c479dadd5dca7144d - pristine_git_object: fa00d666d6d2baea0aac10fcdeff449eb73c9d39 + last_write_checksum: sha1:ac61e644ba7c6da607cb479eafd1db78d8e8012e + pristine_git_object: 7201481e61e269b238887deec30c03f7e16c53d7 docs/models/toolmessagecontent.md: id: f0522d2d3c93 last_write_checksum: sha1:783769c0200baa1b6751327aa3e009fa83da72ee pristine_git_object: 
5c76091fbd2c8e0d768921fab19c7b761df73411 docs/models/toolreferencechunk.md: id: 10414b39b7b3 - last_write_checksum: sha1:2e24f2331bb19de7d68d0e580b099c03f5207199 - pristine_git_object: 3020dbc96563e2d36941b17b0945ab1e926948f4 + last_write_checksum: sha1:ea3bdfc83177c6b7183ad51fddb2d15aee0f0729 + pristine_git_object: 49ea4ca7b05e5fcaaf914f781e3a28483199d82d docs/models/toolreferencechunktool.md: id: c2210d74792a last_write_checksum: sha1:368add3ac6df876bc85bb4968de840ac578ae623 pristine_git_object: 999f7c34885015a687c4213d067b144f1585c946 - docs/models/toolreferencechunktype.md: - id: 42a4cae4fd96 - last_write_checksum: sha1:43620d9529a1ccb2fac975fbe2e6fcaa62b5baa5 - pristine_git_object: bc57d277a39eef3c112c08ffc31a91f5c075c5a4 docs/models/tooltypes.md: id: adb50fe63ea2 last_write_checksum: sha1:f224c3d8732450b9c969b3e04027b7df7892694c @@ -1478,12 +1426,8 @@ trackedFiles: pristine_git_object: 1bc0189c5d1833c946a71c9773346e21b08d2404 docs/models/transcriptionsegmentchunk.md: id: f09db8b2273e - last_write_checksum: sha1:5387f2595d14f34b8af6182c34efac4874a98308 - pristine_git_object: 00a599ee8442f45ce4f529da18ad3e9486b12f9f - docs/models/transcriptionsegmentchunktype.md: - id: 01bda77a53f8 - last_write_checksum: sha1:63d511c2bd93bd477f1b7aae52954b28838316d9 - pristine_git_object: 2968fa26a2dd390b66974e6db57317616fb3b832 + last_write_checksum: sha1:d4a7ebd6a8cc512a0bd00a49af4130c533254b44 + pristine_git_object: d7672c0eebb55243965306c94a771aa18ed641d6 docs/models/transcriptionstreamdone.md: id: 2253923d93cf last_write_checksum: sha1:2a1910d59be258af8dd733b8911e5a0431fab5a4 @@ -1506,68 +1450,44 @@ trackedFiles: pristine_git_object: 63fcfbc63a65cdff4228601e8a46f9d003ec9210 docs/models/transcriptionstreamsegmentdelta.md: id: f59c3fb696f2 - last_write_checksum: sha1:4a031b76315f66c3d414a7dd5f34ae1b5c239b2e - pristine_git_object: e0143a39fb12a4a3efce3e1b250730d20cf21c7d + last_write_checksum: sha1:7d6999abf5a01fc01c0d5302acd3218e535adc9a + pristine_git_object: 
1b652a3b6dc4406a3b7efa8a412b15ca0a5d765f docs/models/transcriptionstreamtextdelta.md: id: 69a13554b554 - last_write_checksum: sha1:de31f5585d671f85e6a9b8f04938cf71000ae3f7 - pristine_git_object: a4062171d7630bcea967a89d8df6cffd4908285f - docs/models/unarchiveftmodelout.md: - id: 4f2a771b328a - last_write_checksum: sha1:0b9ab5d6c7c1285712127cfac9e918525303a441 - pristine_git_object: 12c3d74534897129766397a44afee0f4dac91d9f - docs/models/unarchivemodelrequest.md: - id: e6922871c93a - last_write_checksum: sha1:591461141df5089e884a2db13bfaaef1def0748c - pristine_git_object: 033dad8a66969e2b920ec40391c38daa658c6f0e + last_write_checksum: sha1:d969f462034ed356f2c8713b601ee7d873d4ce07 + pristine_git_object: 77bd0ddcf8a1d95707fa9e041de3a47bb9e7f56d + docs/models/unarchivemodelresponse.md: + id: a690f43df567 + last_write_checksum: sha1:5c9d4b78c92d30bb4835cb724d1ea22a19bf5327 + pristine_git_object: 375962a7110f814288ea9f72323383bd8194e843 docs/models/updateagentrequest.md: id: 371bfedd9f89 - last_write_checksum: sha1:f9ebaa4650f77595fd554bb2711d4b869cba06cc - pristine_git_object: 358cb71d2ab7dfae85ac7768936910a976d2f644 - docs/models/updateagentversionrequest.md: - id: 706f66fb34eb - last_write_checksum: sha1:913a8105b77620d32147a00c1223ce5a117d2df2 - pristine_git_object: b83eb867a518d757b23d981c962f87a0e9c8a454 + last_write_checksum: sha1:97170995ed40391023f0dce5096cfebe83fa7dc8 + pristine_git_object: d3428d92a8f23670a6b587a6017a353d2c12a815 + docs/models/updateagentrequesttool.md: + id: bdf961d2c886 + last_write_checksum: sha1:5355f8c97b2aef98aebff251e1f4830ddbaa7881 + pristine_git_object: e358b1edb9035667104700dde890bb0b43074543 docs/models/updatedocumentrequest.md: id: ee4e094a6aa7 - last_write_checksum: sha1:4798ef091b5d045b0cda3d2a3cc40aef0fb3155c - pristine_git_object: fa5d117a4016208d81ad53f24daa4284b35152f8 - docs/models/updateftmodelin.md: - id: 1b98d220f114 - last_write_checksum: sha1:d1c7a8f5b32228d8e93ad4455fccda51b802f08f - pristine_git_object: 
4e55b1a7d96e1ad5c1e65c6f54484b24cd05fcfc + last_write_checksum: sha1:4c4d774c67449402eb7e1476b9d0fef5b63f2b99 + pristine_git_object: 7e0b41b7be9f559b27a3430f46ed53d0453f6e03 docs/models/updatelibraryrequest.md: id: 2eda82f12f31 - last_write_checksum: sha1:cc1ca5b6f9bd4ab61e3983991f5656ff5ea22e8d - pristine_git_object: e03883cca75f3ed17fa3432e0abc2c892ec3d74a + last_write_checksum: sha1:436e08988daa8ca04ece36a4790ed84e0629b81a + pristine_git_object: aaffc5a9f0d588ff935db2ec2c079af9f162c2c3 docs/models/updatemodelrequest.md: id: 8eabdced3e0e - last_write_checksum: sha1:28765fe537adb34e5e2ef051cd1226bdcae8ea9f - pristine_git_object: 5799c63babcd9377c5024f584328c814c4401c04 - docs/models/updatemodelresponse.md: - id: 742d796d5be3 - last_write_checksum: sha1:2e09ab747fa3247486b25057e887baf0859c3a5b - pristine_git_object: 275ee77f111b926d681a446af9741001a1c88fa8 - docs/models/updateorcreatelibraryaccessrequest.md: - id: c95e6b3df38f - last_write_checksum: sha1:f957324978f18d9831dafe4d1a5d78f755f51ed6 - pristine_git_object: e04567b40d62e0d705096eedaba9fa84913f584d - docs/models/uploaddocumentrequest.md: - id: a211b5f814e4 - last_write_checksum: sha1:ce851cd52da0250c8d86f1346778edb0b5c97a50 - pristine_git_object: 92152b7f247ae4d7f8373e8b13ce947b7ca2cae7 - docs/models/uploadfileout.md: - id: c991d0bfc54c - last_write_checksum: sha1:ce5af8ffadb8443a6d1ca5fbbc014de42da35b9d - pristine_git_object: 6f09c9a6920f373c730fa3538b0c2953d757c257 + last_write_checksum: sha1:96879df11c005b591f2e59975897feff8fc8656e + pristine_git_object: 56b84c59c48ac135345394235c71ce77d384e33e docs/models/usageinfo.md: id: ec6fe65028a9 last_write_checksum: sha1:cf71fb9676d870eba7c4d10a69636e1db4054adc pristine_git_object: f5204ac94a4d6191839031c66c5a9bc0124a1f35 docs/models/usermessage.md: id: ed66d7a0f80b - last_write_checksum: sha1:627f88dbb89e226a7d92564658c23a0e8d71342a - pristine_git_object: 78ed066eed9f0638edc6db697eaeaad6f32b4770 + last_write_checksum: 
sha1:f0ed7d9cb7264f1d9e4a9190772df3f15e25346c + pristine_git_object: e7a932ed71496fa7cc358388c650d25f166f27a4 docs/models/usermessagecontent.md: id: 52c072c851e8 last_write_checksum: sha1:1de02bcf7082768ebe1bb912fdbebbec5a577b5a @@ -1584,82 +1504,82 @@ trackedFiles: id: ba1f7fe1b1a3 last_write_checksum: sha1:ef35648cec304e58ccd804eafaebe9547d78ddcf pristine_git_object: c73952d9e79ea8e08bc1c17817e74e3650def956 - docs/models/wandbintegrationout.md: - id: c1a0f85273d8 - last_write_checksum: sha1:ce7ffc6cc34931b4f6d2b051ff63e1ca39e13882 - pristine_git_object: a6f65667a6bcfb18b78f8f766ab71de84ca13ca7 + docs/models/wandbintegrationresult.md: + id: 729c2601b338 + last_write_checksum: sha1:49f442907815de4661a85a3521803d80b953a17e + pristine_git_object: d12bc31191ba534a9744d78f657c19e7f93f777a docs/models/websearchpremiumtool.md: id: 267988aa8c3f - last_write_checksum: sha1:f9b761d727cbe0c60a2d0800b0a93929c5c3f5e7 - pristine_git_object: 07b8b9265e01bd28b1c30fbc3f1283285e7d6edd + last_write_checksum: sha1:38f80a43f73a13ddedc7730f853c092a48b665f9 + pristine_git_object: 78b736cd314617caa0d77f3c42015212e37ab539 docs/models/websearchtool.md: id: fc4df52fb9b5 - last_write_checksum: sha1:047fd9f950d5a86cf42a8f3ac40f754b395e39ec - pristine_git_object: da5e7b7b600fa3fd0799e95e7a0f9507cd8456c3 + last_write_checksum: sha1:72636dc7ae74264bb5158d284ef6f83da5290b27 + pristine_git_object: 4ca7333c412ad819e3e02c61debe402e3f9b0af9 docs/sdks/accesses/README.md: id: 2ea167c2eff2 - last_write_checksum: sha1:200d509484a1a27fec893e15c39043a9deb140da - pristine_git_object: c1e3866d1a37e1596fa61538317eb68907cbaf57 + last_write_checksum: sha1:279d3b3a4f625b89b25e9a2a47886ac6008b3ca0 + pristine_git_object: c50456df9ea2bb71f78a83ad28f90e089d2e2cd7 docs/sdks/agents/README.md: id: 5965d8232fd8 - last_write_checksum: sha1:a655952f426d5459fa958fa5551507e4fb3f29a8 - pristine_git_object: cd3ec4c6c87f34c4d3634bf510534dff163d97de + last_write_checksum: sha1:a73ae6719acef32b47be55ea5c5684e91f7eda68 + 
pristine_git_object: 8a60837030b9e5dd0adca0d07d9f0266158b080f docs/sdks/batchjobs/README.md: id: a3b8043c6336 - last_write_checksum: sha1:eca07f3c47acbe42264d31fba982a49005a8c983 - pristine_git_object: 24316d78b1be51649d186db1479bbf74f00f87e6 + last_write_checksum: sha1:b4b3123ff210545048e2b0c729f2b7e5f7460f4e + pristine_git_object: 3633fe4ee136c1ac90f9446425f62a0d68fa4f90 docs/sdks/betaagents/README.md: id: 5df79b1612d8 - last_write_checksum: sha1:f2dbb543e7bd1db239ee801c55fa1f7f92ca6322 - pristine_git_object: 0ef655a348d7381aa0a7869a022b362d90497197 + last_write_checksum: sha1:9ec1c7a967bc653fe175a7986ddec74d5feb0714 + pristine_git_object: aaa5110e6db30f5450877b67d70d46e53b98996b docs/sdks/chat/README.md: id: 393193527c2c - last_write_checksum: sha1:908e67969e8f17bbcbe3697de4233d9e1dd81a65 - pristine_git_object: 6907c29d26b51fa7748b339cc73fd3d6d11a95a5 + last_write_checksum: sha1:5e7a43def5636140d70a7c781ed417e527ce9819 + pristine_git_object: 1bf4aeadc762f5d696c278eefaa759f35993e9d5 docs/sdks/classifiers/README.md: id: 74eb09b8d620 - last_write_checksum: sha1:f9cc75dbb32ea9780a9d7340e524b7f16dc18070 - pristine_git_object: 41b520812ac8a6031c0ab32aa771e9903fa24a97 + last_write_checksum: sha1:9f11740f8cf1a3af44fff15b63916305f1882505 + pristine_git_object: dc0f4984380b5b137266421e87a1505af5260e89 docs/sdks/conversations/README.md: id: e22a9d2c5424 - last_write_checksum: sha1:55b150757576819887075feac484ba76ae8abd59 - pristine_git_object: c0089f12b040f3686a584f1569ed4e0ab56c52fb + last_write_checksum: sha1:4c5f8ea93d560956cb23c26e0d5f6d7cbc129e07 + pristine_git_object: e77d329b735dc21f620470bcf82220a79bc34e18 docs/sdks/documents/README.md: id: 9758e88a0a9d - last_write_checksum: sha1:55280d8863200affd25a98d7493a0110c14baad3 - pristine_git_object: 97831f86223c6dbbaec35a240725a8c72e229961 + last_write_checksum: sha1:ac7ab2598066971e8b371a3e73aa266ec697df1b + pristine_git_object: 9c219b6709d5d5bfa28113efca92012e8c5a5112 docs/sdks/embeddings/README.md: id: 15b5b04486c1 - 
last_write_checksum: sha1:46e57c7808ce9c24dd54c3562379d2ff3e0526e8 - pristine_git_object: 0be7ea6dcace678d12d7e7e4f8e88daf7570df5d + last_write_checksum: sha1:76cb4876eebccfd2ab9a10a1b25570477a96a5c1 + pristine_git_object: eecb5c9e991dcd2fd5c1f0688efe3b64b4c6de3b docs/sdks/files/README.md: id: e576d7a117f0 - last_write_checksum: sha1:92558cd6688432150cc433391e2b77a328fa3939 - pristine_git_object: ae29b7bf9383f534b2ca194ec5ff261ff17b5fb6 + last_write_checksum: sha1:f5861c42227b901742fd8afe7155ed6d634b1b4c + pristine_git_object: 9507326be83eaf750daa12c0b1421d819b72340d docs/sdks/fim/README.md: id: 499b227bf6ca - last_write_checksum: sha1:34ff7167b0597bf668ef75ede016cb8884372d1b - pristine_git_object: 3c8c59c79db12c916577d6c064ddb16a511513fd + last_write_checksum: sha1:5b2ce811df8d867d14fe0126f2c9619cca779f56 + pristine_git_object: 49151bf5be49ce6554679bc5c30906894a290ecb docs/sdks/finetuningjobs/README.md: id: 03d609f6ebdd - last_write_checksum: sha1:206624c621a25836333f4c439e0247beb24a7492 - pristine_git_object: fe18feeb640804d9308e6fefe9b5f2371d125f9b + last_write_checksum: sha1:2d7ff255c1462d5f1dff617a1993e730ec3911ea + pristine_git_object: 4262b3a9833180ce86da43a26ee7ab27403f2cd0 docs/sdks/libraries/README.md: id: df9a982905a3 - last_write_checksum: sha1:1c623647aa7b834a844e343c9e3fe0763c8445a5 - pristine_git_object: 8835d0ec8cbabcb8ab47b39df982a775342c3986 + last_write_checksum: sha1:e3eb0e9efb3f758fdf830aa1752c942d59a4f72b + pristine_git_object: 7df1ef4e26449af572412f052ee7ad189039544f docs/sdks/models/README.md: id: b35bdf4bc7ed - last_write_checksum: sha1:2410579fd554ad1e5734cc313d0a75eeb04a1d14 - pristine_git_object: 0cbf1bdde52d1a52c1329ecd1116718237be5152 + last_write_checksum: sha1:2aa91ffe637c049aed0d63d24ac39688b6ecb270 + pristine_git_object: 311a2db6e213902ac5a2c27acf19f856dae2c264 docs/sdks/ocr/README.md: id: 545e35d2613e - last_write_checksum: sha1:a8d22a86b79a0166ecec26a3e9379fa110d49b73 - pristine_git_object: 
9fd9d6fc14c5874dbb819239ea677a171a26969b + last_write_checksum: sha1:da377d75b6b7480c335d7f721bb06fe11492be38 + pristine_git_object: fde2a82339e10c74aca6d1b4168b62501d7bbf83 docs/sdks/transcriptions/README.md: id: 089cf94ecf47 - last_write_checksum: sha1:493070fcce7cec1a627b04daa31c38a6745659e7 - pristine_git_object: 9691b81d3a7eb27d7b2b489408d32513859646c9 + last_write_checksum: sha1:15d118796f147bc5b0bf4146ba39bfa9edfbc996 + pristine_git_object: 97703c9b4dc942385ee04ae96cbd100c3f632a17 py.typed: id: 258c3ed47ae4 last_write_checksum: sha1:8efc425ffe830805ffcc0f3055871bdcdc542c60 @@ -1670,8 +1590,8 @@ trackedFiles: pristine_git_object: c35748f360329c2bc370e9b189f49b1a360b2c48 src/mistralai/client/__init__.py: id: f1b791f9d2a5 - last_write_checksum: sha1:fcca936cb62cc76d57372d5bd5735877b79b53a4 - pristine_git_object: 481fc91604c413966c8510d8341edaa3355fc276 + last_write_checksum: sha1:c05dc9845d3361c4aae7796b079ac0e7952e8606 + pristine_git_object: 4b79610a3fc8222fc8f9adeeaf798e894708fc06 src/mistralai/client/_hooks/__init__.py: id: cef9ff97efd7 last_write_checksum: sha1:9a6f060871150610f890cc97676c3afe9050b523 @@ -1686,156 +1606,236 @@ trackedFiles: pristine_git_object: 036d44b8cfc51599873bd5c401a6aed30450536c src/mistralai/client/_version.py: id: cc807b30de19 - last_write_checksum: sha1:dd6d1521f7ecfc56be58eafc1709873a04d27fb0 - pristine_git_object: 814d9ec74a37ae50f106ea07b3c174e65685521b + last_write_checksum: sha1:03563b818feb27386f7d6a0321a3875e3024a2d2 + pristine_git_object: 1a4d15d66f45d13c7f9cae550138390b5cf5897e src/mistralai/client/accesses.py: id: 76fc53bfcf59 - last_write_checksum: sha1:16574ca54176ec30b236ab1a4694f57a6314db43 - pristine_git_object: cda484c8feade66829dad587f5f397aa89d4fb6f + last_write_checksum: sha1:ed94623aa8a2bd502572a699a2f54c9281ec283e + pristine_git_object: 0761b0bc6080ab0d891be70089a1908d435559fa src/mistralai/client/agents.py: id: e946546e3eaa - last_write_checksum: sha1:3b46ac68d37563a9eb988ad2978083e40cf4513d - 
pristine_git_object: 0942cb20173f0b2e3f828f5857e3aa221f65bc1b + last_write_checksum: sha1:7049cab7c308888c88b0341fb29f0132e154e3cb + pristine_git_object: 2b70d1520663d999773159d89b1f9dc96f7fbf97 src/mistralai/client/audio.py: id: 7a8ed2e90d61 last_write_checksum: sha1:e202d775d24c0303053e0548af83fcb04e2748f4 pristine_git_object: f68f063c08a099d07904456daa76d8e2d2ecdbe6 src/mistralai/client/basesdk.py: id: 7518c67b81ea - last_write_checksum: sha1:795253524d0911d227b934978bdacb84619177a3 - pristine_git_object: 611b40597b42ac309871681b38a3b3c249cbe494 + last_write_checksum: sha1:2cea76931db51175b2c787d0c707f08e9944c22f + pristine_git_object: a976121bd224d64497e5006cb58dd728f6a67144 src/mistralai/client/batch.py: id: cffe114c7ac7 last_write_checksum: sha1:b452983f67b33f26e1faa60fdbbb171cb1877224 pristine_git_object: 7e36fd0d73ebeb873f74f4109896a6cf3bb7d2ba src/mistralai/client/batch_jobs.py: id: 3423fec25840 - last_write_checksum: sha1:eb1baade19f5da3dd815ebfbabccca139eb7b25d - pristine_git_object: 752c76524a4fa19ed1654943218ca5182d563ca3 + last_write_checksum: sha1:34de0e986e7c0e4377f70125d319e522280c565f + pristine_git_object: 0e135b30cd122d1a813ee67bf2f9037953448e73 src/mistralai/client/beta.py: id: 981417f45147 last_write_checksum: sha1:85f42fc6c2318eef94c90405b985120220c9c617 pristine_git_object: 65b761d18f7274cc33162a83efa5b33211f78952 src/mistralai/client/beta_agents.py: id: b64ad29b7174 - last_write_checksum: sha1:227c2ef3812c06e4a813063bf9d2282ce0884ecd - pristine_git_object: 4e692f17579635d5f0cc03f86b8158b3344ae87f + last_write_checksum: sha1:7c900a6b1483108a367050440667c069b08fbb92 + pristine_git_object: 157c5de4c66273e6df468f8a12b4399f9efb32fb src/mistralai/client/chat.py: id: 7eba0f088d47 - last_write_checksum: sha1:6f052ac3117829b16906a4e1cbfa5b1f7ab104fd - pristine_git_object: 35698d32ac870f4b59c03f02700f20c04b14462d + last_write_checksum: sha1:520b0da011d63c60bd0d3a960a410a8f4a6a3e22 + pristine_git_object: 13b9c01f035c4fd6f60b78f20a1801bedf3b582b 
src/mistralai/client/classifiers.py: id: 26e773725732 - last_write_checksum: sha1:abd5033ee390fdeddfa4af918cc44f6210a2a6a0 - pristine_git_object: 3407c4b77db429535465f29754a2da8145d6a5fe + last_write_checksum: sha1:ee94a4e50cda893f9c19c2304adda8b23fc2de9e + pristine_git_object: 67199b601e38dff6fc6a4317eb845fbde6c25de0 src/mistralai/client/conversations.py: id: 40692a878064 - last_write_checksum: sha1:6e81283d3d5db5dd554af68d69313951cf5f4578 - pristine_git_object: 646b91f3980bbe9be01078162d5b4ad9afb141b9 + last_write_checksum: sha1:1101b9e374010ba9cb080c30789672cfcfc45c55 + pristine_git_object: ec33b1fb12d1923ef5f686ed09c5fe5ae889e40c src/mistralai/client/documents.py: id: bcc17286c31c - last_write_checksum: sha1:9ae89ef80a636b55ba4cdc3ad6c77c47c1824433 - pristine_git_object: c78f2944edaac77864ff6c4dd8d19d3aab3f0cb6 + last_write_checksum: sha1:37669f51eba1b352a5e3c7f3a17d79c27c7ea772 + pristine_git_object: b3130364c0f3cc90ed1e4407a070bd99e3cce606 src/mistralai/client/embeddings.py: id: f9c17258207e - last_write_checksum: sha1:7cd6d848ed8978637988d9b7e1a7dd92dac5eb3b - pristine_git_object: 4a056baa014217927412e9dd60479c28de899e2e + last_write_checksum: sha1:0fbf92b59fde3199c770a522ead030f8fa65ff5c + pristine_git_object: 5f9d3b9cb611943e509caeda9ddd175e3baee2c3 + src/mistralai/client/errors/__init__.py: + id: 0b2db51246df + last_write_checksum: sha1:0befddc505c9c47388683126750c7ad0e3fbef52 + pristine_git_object: 58a591a1cc2896f26df2075ffca378ca6c982d1e + src/mistralai/client/errors/httpvalidationerror.py: + id: ac3de4a52bb6 + last_write_checksum: sha1:73251adb99a07d11b56d0bc0399a2362ff9ccdba + pristine_git_object: 97b165629c39ab6e24406eb3f13970414b73f8f7 + src/mistralai/client/errors/mistralerror.py: + id: d1f57f0ff1e9 + last_write_checksum: sha1:30065cdd7003ec02cb3463d7c63229c4ff97503c + pristine_git_object: eb73040c5b5251018695204fde80eac914b35dae + src/mistralai/client/errors/no_response_error.py: + id: 8b469ecb0906 + last_write_checksum: 
sha1:0b3fdb1136472c41a4a739a5cbf9e2a4ce0c63a4 + pristine_git_object: d71dfa7b24146f1390ac6830e61acf337b99ca83 + src/mistralai/client/errors/responsevalidationerror.py: + id: 6cfaa3147abe + last_write_checksum: sha1:6862d178d4d1964bc03db47b76709aa406546981 + pristine_git_object: a7b3b9f0207846b5f176076b9f400e95cb08ebb9 + src/mistralai/client/errors/sdkerror.py: + id: c489ffe1e9ca + last_write_checksum: sha1:f708168e46c2960dd51896083aee75ccdb36f9dd + pristine_git_object: 25b87255a51021079f8ba5cc60b43509e12f9a4d src/mistralai/client/files.py: id: f12df4b2ce43 - last_write_checksum: sha1:aa647afa486bbed48083c0b1ec954bdc5cfd0280 - pristine_git_object: 57d389f1e245f5768fe9e8991f65229dd4bd608d + last_write_checksum: sha1:a16c8702d15339200b09c62948c06f79e720d79c + pristine_git_object: a5f3adf6dd9b60a202c70edf7d2a148a626ce471 src/mistralai/client/fim.py: id: 217bea5d701d - last_write_checksum: sha1:90cacb025a1a1fb81e619d59819c0a652f4a5efa - pristine_git_object: be3f7742b866ac58b7bbb65e3593e9865dee134f + last_write_checksum: sha1:dc427c9e954dfb9a7fe2df8b5c544877a28cdc73 + pristine_git_object: 8ffb7730a03398322dfdd6c83724096d4924c5c5 src/mistralai/client/fine_tuning.py: id: 5d5079bbd54e last_write_checksum: sha1:fe1f774df4436cc9c2e54ed01a48db573eb813cd pristine_git_object: df6bc5643a13294ddfbeecc6ae84d00cd7199bed src/mistralai/client/fine_tuning_jobs.py: id: fa1ea246e0b2 - last_write_checksum: sha1:edfe25f99047d4cbd45222cd23823c782286a2c8 - pristine_git_object: 9a28ded152a4f4a5b625a97e087aebc5a287d71e + last_write_checksum: sha1:8cbf3827f5c2e43170192de39be498af0bf24cf0 + pristine_git_object: c2ee871bb1ccf7e3e24081121a7e54f1483eee5c src/mistralai/client/httpclient.py: id: 3e46bde74327 last_write_checksum: sha1:0f4ecc805be1dc3d6e0ca090f0feb7d988f6eb9d pristine_git_object: 544af7f87d6b7097935290bebd08e30e5f485672 src/mistralai/client/libraries.py: id: d43a5f78045f - last_write_checksum: sha1:b3fd0348f4f56aab9873d09c45ed9575baf6e7c3 - pristine_git_object: 
26ceabe19a340b7fd4dbb74aebab62bc45093ae5 + last_write_checksum: sha1:6440b3df71fe557ecba5c23768d115efd4ceb26f + pristine_git_object: b8728362b87349118ac6f163f50613dd18c43340 src/mistralai/client/models/__init__.py: id: e0e8dad92725 - last_write_checksum: sha1:d047eab2a2a8ee5af65ed19055a0a3e3092ad2c5 - pristine_git_object: 093ffcbdb0b57458cf856f585e6637d7d5955e8d + last_write_checksum: sha1:50727667552480e8298431f5a3dcc78457c53331 + pristine_git_object: 5ef8b3f3dd9fbb32d4675f7e11808c29fc218c57 src/mistralai/client/models/agent.py: id: 1336849c84fb - last_write_checksum: sha1:d41a96558ddbd52b6c71d316c291847bb6131a01 - pristine_git_object: 05ae24cde5149e30004b7cd4a2409c753682be56 + last_write_checksum: sha1:6090ddf2b5b40656dfbf3325f1022a40ae418948 + pristine_git_object: 686a6eb84ecd27e725e3773b3f7773dddac1c10c src/mistralai/client/models/agentaliasresponse.py: id: 3899a98a55dd last_write_checksum: sha1:d7e12ea05431361ad0219f5c8dee11273cd60397 pristine_git_object: 6972af2a4ae846e63d2c70b733ecd6c8370ee0cd src/mistralai/client/models/agentconversation.py: id: 1b7d73eddf51 - last_write_checksum: sha1:bc2f1a3710efc9c87d6796ccce953c9ce9cf3826 - pristine_git_object: a850d54c64de0c84ad4ea2b11ea1a828eb2580c4 - src/mistralai/client/models/agentcreationrequest.py: - id: 35b7f4933b3e - last_write_checksum: sha1:d3f61940b4cccfc9c13860844f4115e60b095823 - pristine_git_object: 898d42a9c16ffe893792e14445e9ebfcbd046ba3 + last_write_checksum: sha1:28718fb00dbe74241712b4f7a3fbce2d060f7e86 + pristine_git_object: da30c6634294cdaba459b68ca8877d867ee052fb src/mistralai/client/models/agenthandoffdoneevent.py: id: 82628bb5fcea - last_write_checksum: sha1:537e9f651de951057023d3712fa1820da17a21b4 - pristine_git_object: 40bf84970e1d245c3c7fbad64d73f648f8287438 + last_write_checksum: sha1:829c5a152e6d737ffd65a3b88b0b2890e6703764 + pristine_git_object: e2609e3d1fb62b132eb53112eb2bdc4ae855085f src/mistralai/client/models/agenthandoffentry.py: id: 5030bcaa3a07 - last_write_checksum: 
sha1:afe800c64c74aa79fceda4e4ce808f67573edbc7 - pristine_git_object: b18fe17c70d561b926bdac04124ebca8fc1cca0b + last_write_checksum: sha1:c9544755ad6d3a3831f8afe446c6a9a523eb5137 + pristine_git_object: f92ef2cc7310d5df94436f3067a640d3848405f0 src/mistralai/client/models/agenthandoffstartedevent.py: id: 2f6093d9b222 - last_write_checksum: sha1:933f8be5eacd86881a42cfb83612f327caa77ee7 - pristine_git_object: e278aef39d3bc5e158a094c593391fa8ad77c320 + last_write_checksum: sha1:c9f86e01497c53f3c1806dbb9fdff6e2d9993323 + pristine_git_object: 2a4023419212fec8b3f0e83d506a25b17408a8b1 + src/mistralai/client/models/agents_api_v1_agents_create_or_update_aliasop.py: + id: 23a832f8f175 + last_write_checksum: sha1:237d6b4419615c9c26f96d49760732bd7b4617e7 + pristine_git_object: 04761ae786c35e6fa6cd5a896a5e52458cb3a5d5 + src/mistralai/client/models/agents_api_v1_agents_delete_aliasop.py: + id: 9c9947e768d3 + last_write_checksum: sha1:385faebecef8479d1a72a7ab6f15ddcc611dad87 + pristine_git_object: 291a9802a7d49108fc0d428610cb4c37b42f0796 + src/mistralai/client/models/agents_api_v1_agents_deleteop.py: + id: 95adb6768908 + last_write_checksum: sha1:f222a61a73ba2f37051fffbf2d19b3b81197d998 + pristine_git_object: 5e41fdcdbf182e993acd71603ecb8c9a14e48043 + src/mistralai/client/models/agents_api_v1_agents_get_versionop.py: + id: ef9914284afb + last_write_checksum: sha1:c99ee098f659a56cb365c280cc29de441916b48a + pristine_git_object: 941863d0f8143020200bb5566ce66d527c4369c8 + src/mistralai/client/models/agents_api_v1_agents_getop.py: + id: f5918c34f1c7 + last_write_checksum: sha1:b90285965e2aaccaf989e59b8f1db4a53ae8b31c + pristine_git_object: dd17580dd0041a979fc6c9c7349d14a3e200f5d3 + src/mistralai/client/models/agents_api_v1_agents_list_version_aliasesop.py: + id: a04815e6c798 + last_write_checksum: sha1:b4b5c4e8566f1d0c68a14aba94b7ffea257fd7ce + pristine_git_object: bb1da6020386fabfbd606db9a098a0e9323ce3b0 + src/mistralai/client/models/agents_api_v1_agents_list_versionsop.py: + id: 
19e3310c3907 + last_write_checksum: sha1:6628e9ff747c579e11fa9a756cee3b11c57c476d + pristine_git_object: 54b62e90e23c1782a0b068460d6877cac3b28916 + src/mistralai/client/models/agents_api_v1_agents_listop.py: + id: 25a6460a6e19 + last_write_checksum: sha1:0abe889b85470b28917368a2b958a13303bd38f1 + pristine_git_object: 97b1c7f1a070be5e12e1a32ad56dbcfcb0f1cd68 + src/mistralai/client/models/agents_api_v1_agents_update_versionop.py: + id: 63f61b8891bf + last_write_checksum: sha1:e9046cf75e008e856f00dda8725cbb16d83cd394 + pristine_git_object: 5ab821ea413d656dc7194f3588c8987c3e720831 + src/mistralai/client/models/agents_api_v1_agents_updateop.py: + id: bb55993c932d + last_write_checksum: sha1:bc922e15651d7bb33b841d9b3ae247843b6a5426 + pristine_git_object: 69da5001007916e458cab6caf8c10073c8fbc7d6 + src/mistralai/client/models/agents_api_v1_conversations_append_streamop.py: + id: ec00e0905f15 + last_write_checksum: sha1:d0a253c2f383241378e6fab35a38427d0a1dd827 + pristine_git_object: d257dc789cdc4f57bb91d1788335d2d49442d02f + src/mistralai/client/models/agents_api_v1_conversations_appendop.py: + id: 39c6125e850c + last_write_checksum: sha1:864ece4ddcd65075547daa1ab996ba7cfe9939fc + pristine_git_object: 61fec0834e6e05a56a7ee5c984fb0401f9c72f5c + src/mistralai/client/models/agents_api_v1_conversations_deleteop.py: + id: 0792e6abbdcb + last_write_checksum: sha1:9725fce86a52b4995a51e1995ca114c0c4b414df + pristine_git_object: 499645a77782e29db61e439060340fee787799c1 + src/mistralai/client/models/agents_api_v1_conversations_getop.py: + id: c530f2fc64d0 + last_write_checksum: sha1:241e5a07f37fa88f1e5011615b3e2b47a1aaf6a7 + pristine_git_object: 504616abbf0c9d0595f2aae81c59e52352cee323 + src/mistralai/client/models/agents_api_v1_conversations_historyop.py: + id: 2f5ca33768aa + last_write_checksum: sha1:fccc3e1a3f48eff31463829037a440be667a7da1 + pristine_git_object: ef0a4eb084de52d4bde435ee9751aaa12e61dcc3 + src/mistralai/client/models/agents_api_v1_conversations_listop.py: + id: 
936e36181d36 + last_write_checksum: sha1:e3e52cf7967b9b78099db9449cb33e3ded34d111 + pristine_git_object: 8bf66aea23f16734c1f9e03629aaf7246e4e60b4 + src/mistralai/client/models/agents_api_v1_conversations_messagesop.py: + id: b5141764a708 + last_write_checksum: sha1:17fd503da7fb20198792c6e25f94dcc0a1e5db05 + pristine_git_object: 19978a194e2dd633fe89bcee7ceac177fcdd6629 + src/mistralai/client/models/agents_api_v1_conversations_restart_streamop.py: + id: c284a1711148 + last_write_checksum: sha1:2e462249c8ab71376c5f6179a2c033e254165f3e + pristine_git_object: 63c744498dfbdd254f2e780d90a680b10100ee63 + src/mistralai/client/models/agents_api_v1_conversations_restartop.py: + id: 3ba234e5a8fc + last_write_checksum: sha1:5dd06d300dbe8832b72d868657dc4c58f0ebaad5 + pristine_git_object: 3186d5df9000d4a62c0fbc64a601e6b709803deb src/mistralai/client/models/agentscompletionrequest.py: id: 3960bc4c545f - last_write_checksum: sha1:ee1e60d894d3a9277c1a3970c422483ffa502e21 - pristine_git_object: f4a2d646927c8c0f250507f52c5e7515830759ad + last_write_checksum: sha1:5d81a0421184ed547208e8ea7cff47b18fc00788 + pristine_git_object: 6955f6acb023fd842d9ec46a694d270a66911c0e src/mistralai/client/models/agentscompletionstreamrequest.py: id: 1b73f90befc2 - last_write_checksum: sha1:3bc4976eeda6d9b30bba72e7f7c417ca9ba885c5 - pristine_git_object: 732e2402190d40bc5360868d3048d57fff9e7b55 - src/mistralai/client/models/agentupdaterequest.py: - id: 2d5a3a437819 - last_write_checksum: sha1:4a0ef549756904749a36b580cc2296a6a54d6647 - pristine_git_object: 96e209d41b638002f129ec4c13748082ccc3a8db + last_write_checksum: sha1:b46298a653359bca205b6b1975bcd1909e563dff + pristine_git_object: c2cf35522236f29ca1b9f2a438dfc79a59ca3e2a src/mistralai/client/models/apiendpoint.py: id: 00b34ce0a24d last_write_checksum: sha1:733e852bf75956acd2c72a23443627abfa090b7b pristine_git_object: a6665c1076f05c28936510c24ee7d3498d7e7a24 - src/mistralai/client/models/appendconversationop.py: - id: 1c47dd1e7c7e - 
last_write_checksum: sha1:109ced509e3caa5e5c9610b3a18839d113be708a - pristine_git_object: 710b8e1ca3fbfbb747e48d7699588bc199a41274 - src/mistralai/client/models/appendconversationstreamop.py: - id: 1ab08b189e9d - last_write_checksum: sha1:edd2a91da29f83646538b57e4d29f006d31f9dec - pristine_git_object: 55efca0e32c5d54d100563500aee9b61952d07c2 - src/mistralai/client/models/archiveftmodelout.py: - id: bab499599d30 - last_write_checksum: sha1:92f5b5a89ae5e52523d30069629e0ac8dc858d6b - pristine_git_object: 3107116c4a2c78c591999b220349325612a19b4e - src/mistralai/client/models/archivemodelop.py: - id: beefa1df3b7c - last_write_checksum: sha1:6f78b2f84f42267d4928a5a3ad1d3d3cae417cac - pristine_git_object: 30b4a9bd71f349cc4ab4b12df73770d327008527 + src/mistralai/client/models/archivemodelresponse.py: + id: 2d22c644df64 + last_write_checksum: sha1:d0f67fd2bc5a6e8de4f2b0a5742ceb4a1f7f5ab8 + pristine_git_object: f1116850c8bf0159c4146d4973988ea5d0fe7de7 src/mistralai/client/models/assistantmessage.py: id: 2b49546e0742 - last_write_checksum: sha1:a58ecb7bc381af02d83247f0518a3d34013b4575 - pristine_git_object: 5a4a2085e838196d3ab2b1c00bbeb7a78516dfb2 + last_write_checksum: sha1:dcfa31c2aac95a0d7bd748e96bd87a5c85c0d1f6 + pristine_git_object: 26a778c70439d21b890e85f2c85dbb560e8bffef src/mistralai/client/models/audiochunk.py: id: ce5dce4dced2 - last_write_checksum: sha1:8218d4c7118f677f16a3a63c55348c40d3ab3156 - pristine_git_object: a51868279b9b4ce2d97990286512d69f8d7f2e82 + last_write_checksum: sha1:d3c2e28583d661a9583c40c237430a1f63ea7631 + pristine_git_object: 68866cd2c3c640cf56258f2f98b8a2385ea6fcdb src/mistralai/client/models/audioencoding.py: id: b14e6a50f730 last_write_checksum: sha1:92ca06dce513cd39b2c7d9e5848cf426b40598ce @@ -1846,84 +1846,68 @@ trackedFiles: pristine_git_object: fef87ae76b31128ebd5ced4278e274c249181c23 src/mistralai/client/models/audiotranscriptionrequest.py: id: e4148b4d23e7 - last_write_checksum: sha1:6d7b267bc241c1f72b5b7839d6e2ad76a4c1ecff - 
pristine_git_object: 8c47a83cada33d8dbd4a9ffdedb55d3f4f55dadf + last_write_checksum: sha1:a6ef85be4ae24aa79c8c3fa9dcaf055e0ba9b266 + pristine_git_object: fe4c79e3427fae3e022bd936236d2934eaa76b60 src/mistralai/client/models/audiotranscriptionrequeststream.py: id: 33a07317a3b3 - last_write_checksum: sha1:66ae6146b9b75600df720054ec2c21e4e996b1fe - pristine_git_object: a080cee24c0d67c38fc6785c440418580e097700 + last_write_checksum: sha1:6e648ff58a70a0a3bd63a24676122b80eba4baf7 + pristine_git_object: 2d1e9269b51d84cd8b21643fe04accd00839b013 src/mistralai/client/models/basemodelcard.py: id: 556ebdc33276 - last_write_checksum: sha1:94871ce94c92fbbcff9fa5d6a543c824b17ee13b - pristine_git_object: 17a3e5c93339082f408f9ab5f34b5a01e24c74e0 + last_write_checksum: sha1:e2c3d1effee5b434fea9b958c0dd54fa96143924 + pristine_git_object: 9c9e9a2045a10f4606f11ee5886a19ccf03bbf0e src/mistralai/client/models/batcherror.py: id: 1563e2a576ec - last_write_checksum: sha1:9b59325428680d58151696c1738ad5466d67a78d - pristine_git_object: c1bf722a45c4326f24f7fd34ea536d59c48c67f2 - src/mistralai/client/models/batchjobin.py: - id: 72b25c2038d4 - last_write_checksum: sha1:667020377b2ca85dfd9c5aed96c7d4ba2571093b - pristine_git_object: a0c3b9146130a5ebfdbd0ec6338092bacc01bf85 - src/mistralai/client/models/batchjobout.py: - id: cbf1d872a46e - last_write_checksum: sha1:9031bc5ff1986ddc283551f7f5d210c9de67cc56 - pristine_git_object: 99c2b95118364d660f0cefde16507a83e8c9cafe - src/mistralai/client/models/batchjobsout.py: - id: 20b2516e7efa - last_write_checksum: sha1:426287f6ef9ed88e75f9e318582627d066f9e4f0 - pristine_git_object: f65fc040a964c68c82b5df7d3fb9e40222182322 + last_write_checksum: sha1:51c9e9a4d306c2de45dc0879ade62daed3fc2972 + pristine_git_object: 8a353cd2dc06a8c6f2db3d6b613cfdca8278f57e + src/mistralai/client/models/batchjob.py: + id: 85cd28932cc7 + last_write_checksum: sha1:532a8c6ca8546052159e5e5174cf65ce17a62f3f + pristine_git_object: 80acac336883c23b621d0dc647fef20548bf061a 
src/mistralai/client/models/batchjobstatus.py: id: 61e08cf5eea9 last_write_checksum: sha1:78934183519948464385245cbc89efb68ac00bfb pristine_git_object: bd77faa2fbed74b19a8d3884af6d43bc1b4806e0 src/mistralai/client/models/batchrequest.py: id: 6f36819eeb46 - last_write_checksum: sha1:115df324d1fec164bae60bf4b37acfa5149b3172 - pristine_git_object: 41c4523456398b302e0b7eb35824efc014f03aa6 + last_write_checksum: sha1:b2a71163e37a9483e172dc13b6320749bee38f2f + pristine_git_object: 911a9a0554b9b8cb6dedcb3a86a06c39890b875e src/mistralai/client/models/builtinconnectors.py: id: 2d276ce938dc last_write_checksum: sha1:4ceb3182009b6535c07d652ccf46661b553b6272 pristine_git_object: ecf60d3c1a83028d9cf755d4c9d5459f6b56e72a - src/mistralai/client/models/cancelbatchjobop.py: - id: cebac10b56a9 - last_write_checksum: sha1:2614180488e51c0e701fffdb058b39892c5bc1e5 - pristine_git_object: cd94ee86467247fe2bc7f7381fa05b57bedabef1 - src/mistralai/client/models/cancelfinetuningjobop.py: - id: c9a1b39f0d02 - last_write_checksum: sha1:139d3c443678aeeb8afedea8b2a783210e5ac28c - pristine_git_object: ddd445bb433df9a0f987693d97088d79e5e8c47f src/mistralai/client/models/chatclassificationrequest.py: id: afd9cdc71834 - last_write_checksum: sha1:91f62e46c415a0168442695f61cb30756227ed1a - pristine_git_object: 8b6d07b906c688a3849b8a4576cc10e075a6868f + last_write_checksum: sha1:a29088359142ebd6409f45569168b2096014119e + pristine_git_object: cf2aa78af3ffc747d557422b83551075b83e601d src/mistralai/client/models/chatcompletionchoice.py: id: 7e6a512f6a04 last_write_checksum: sha1:de0281a258140f081012b303e3c14e0b42acdf63 pristine_git_object: 2c515f6e9a290ebab43bae41e07493e4b99afe8f src/mistralai/client/models/chatcompletionrequest.py: id: 9979805d8c38 - last_write_checksum: sha1:95c0879e52d8b6c1ff389a5dfe1776129c764c00 - pristine_git_object: 4f7d071b5a0b84ef27397b4acaf4a798b6178eb8 + last_write_checksum: sha1:1f0390718ab06126a05e06797ef6af310ccab543 + pristine_git_object: 
e871bd92733ac400fdfeb2cf4f66fc32a7584103 src/mistralai/client/models/chatcompletionresponse.py: id: 669d996b8e82 last_write_checksum: sha1:97f164fea881127ac82303e637b6a270e200ac5b pristine_git_object: 7092bbc18425091d111ec998b33edc009ff0931b src/mistralai/client/models/chatcompletionstreamrequest.py: id: 18cb2b2415d4 - last_write_checksum: sha1:12e794c89a954702c3d4dccddad9b365331bd996 - pristine_git_object: ec7d2ae131cf5fac7eb618bbe09340ac23d444ef + last_write_checksum: sha1:c197792ed1dd78159ab0b970f8f76087ff2c4d6b + pristine_git_object: b7b2bff138cee9c130fa01d6157d8b6c21ea5a9c src/mistralai/client/models/chatmoderationrequest.py: id: 057aecb07275 - last_write_checksum: sha1:e18a5ae518f5413b1bff45f85f823b60e00ef32a - pristine_git_object: a8d021e8deb2015470765340281789a7fba544aa - src/mistralai/client/models/checkpointout.py: - id: 3866fe32cd7c - last_write_checksum: sha1:5ed4988914acef48854337127c4ca51791de3ab9 - pristine_git_object: 3e8d90e920cd34ff611f5e875c0163e1a4087f6f + last_write_checksum: sha1:7677494c0e36ccbc201384cb587abeb852a1a924 + pristine_git_object: 228e7d26b8b172c3e11f01d4f260bf6e5195b318 + src/mistralai/client/models/checkpoint.py: + id: 1a530d3674d8 + last_write_checksum: sha1:418f08c61b64fa7ffb053c6f5912e211acab1330 + pristine_git_object: c24e433eb4787146620fb48b6d301f51a4db5067 src/mistralai/client/models/classificationrequest.py: id: 6942fe3de24a - last_write_checksum: sha1:c98f6751aeba813b968aaf69c3551972b94da4c8 - pristine_git_object: 903706c31176da4c2ab021b3bcaeb2217ca98f76 + last_write_checksum: sha1:7bd416d4b0e083efbf9324107263027140702ddb + pristine_git_object: 25b6941355cb9629abb9c0f09fb6fd191c56ffa6 src/mistralai/client/models/classificationresponse.py: id: eaf279db1109 last_write_checksum: sha1:64522aa2b0970e86a0133348411592f95163f374 @@ -1932,222 +1916,194 @@ trackedFiles: id: 2445f12b2a57 last_write_checksum: sha1:2b8b9aeadee3b8ffe21efd1e0c842f9094c4ecc7 pristine_git_object: 6c7d6231d211977332100112900ea0f8cdf5d84c - 
src/mistralai/client/models/classifierdetailedjobout.py: - id: d8daeb39ef9f - last_write_checksum: sha1:1b6dde6554e51d9100f2e50779eff56b3ca07603 - pristine_git_object: bc5c5381d61b6b4945b51dc9836bcc2e7aa66f9f - src/mistralai/client/models/classifierftmodelout.py: - id: 2903a7123b06 - last_write_checksum: sha1:5141a0c29da0739057c52b2345a386c79d6f8f85 - pristine_git_object: 182f4954c2b3f1408cb05eee76e2bf24005b023e - src/mistralai/client/models/classifierjobout.py: - id: e19e9c4416cc - last_write_checksum: sha1:c5daf7e879911ea24fba847a1c12ab9774ebbe98 - pristine_git_object: 03a5b11c46097733d609f3b075b58ef729f230a5 - src/mistralai/client/models/classifiertargetin.py: - id: ed021de1c06c - last_write_checksum: sha1:8a1db343861e4f193a56d4030862c1f3a361d3e1 - pristine_git_object: b250109bd03976c93c571dbbacb1c631acd19717 - src/mistralai/client/models/classifiertargetout.py: - id: 5131f55abefe - last_write_checksum: sha1:304408da049ff4ad17f058267ffaa916ef907dc2 - pristine_git_object: 3d41a4d9c887488e7b08cc9d5d8dcb5b0fd26781 + src/mistralai/client/models/classifierfinetunedmodel.py: + id: 5a9a7a0153c8 + last_write_checksum: sha1:853bf1b3b941ec3aebeb17ac2caf38fa0dd094de + pristine_git_object: fbcf5892d7f0a3ed8b3872d71dd95ed3a25463d1 + src/mistralai/client/models/classifierfinetuningjob.py: + id: a244d5f2afc5 + last_write_checksum: sha1:ceb13935702275025284bb77aa8bf5ccf926e19c + pristine_git_object: fb160cf8e16a1b4899f8bb2803b18ba1f55232ce + src/mistralai/client/models/classifierfinetuningjobdetails.py: + id: 75c5dee8df2e + last_write_checksum: sha1:6b3f2f7ca3bd4e089591f5f9c59b7e28a00447f8 + pristine_git_object: 5d73f55ee0f1321fdeeb4db1971e144953e8e27f + src/mistralai/client/models/classifiertarget.py: + id: 2177d51d9dcf + last_write_checksum: sha1:c801dacc31e2d7682285a9a41d8ef38fa2e38fb9 + pristine_git_object: 4d66d789a42a0bc8762998161f1ad801bd8d96d4 + src/mistralai/client/models/classifiertargetresult.py: + id: 19c343844888 + last_write_checksum: 
sha1:3f5b37de3585cb38a3e41f0ee49dc4b5a33bf925 + pristine_git_object: 8ce7c0ca167b38ebaf1e5fc6393ab56d9f142cfa src/mistralai/client/models/classifiertrainingparameters.py: id: 4000b05e3b8d - last_write_checksum: sha1:4063f78ea65f138578bef4ce8908b04e556cc013 - pristine_git_object: f360eda504f0aa3f60ba6834aab59c1beb648151 - src/mistralai/client/models/classifiertrainingparametersin.py: - id: 4b33d5cf0345 - last_write_checksum: sha1:7764e6e6c5fc58e501c0891d036bbb22a8ddcb07 - pristine_git_object: 85360a7e7ba5212ef9052d3bd5f368ea4e2c4d98 + last_write_checksum: sha1:d7ce2f1017463c52856b973d696c9abecf5f79e3 + pristine_git_object: 14fa4926f8b5b62aa6b5d8864c40d5acf66e7b15 src/mistralai/client/models/codeinterpretertool.py: id: 950cd8f4ad49 - last_write_checksum: sha1:b014008db6ddce4b35aedec70783d74ce1b5cf83 - pristine_git_object: f69c7a5777af16df151589d2c5c8d81de4d28638 + last_write_checksum: sha1:8c3d91805d6c5f5cc9d249216694781faf15ea68 + pristine_git_object: ce14265f6d312c3da52014d2a058b6a730d5c980 src/mistralai/client/models/completionargs.py: id: 3db008bcddca - last_write_checksum: sha1:4c4ba2d39540bbb06fc1c49815fc6a7c8cf40ab2 - pristine_git_object: 918832acf3ea3d324c20e809fcdb1eae2ba3d7fd + last_write_checksum: sha1:e3d36235610c0546d8a2f2bb0a1db0f953747d88 + pristine_git_object: ab5cf5ff2d4df92d00664803f9274696ae80216d src/mistralai/client/models/completionargsstop.py: id: 5f339214501d last_write_checksum: sha1:744878976d33423327ea257defeff62073dad920 pristine_git_object: 39c858e66380044e11d3c7fd705334d130f39dea src/mistralai/client/models/completionchunk.py: id: d786b44926f4 - last_write_checksum: sha1:04b634cffa4b0eb8ca177c91d62d333a061160df - pristine_git_object: 67f447d0c6cd97cb54ffcd0c620654629ac4e848 - src/mistralai/client/models/completiondetailedjobout.py: - id: 9bc38dcfbddf - last_write_checksum: sha1:4771444753ff456829249d4e5fa5f71f2328fa78 - pristine_git_object: cd3a86ee28cdbf3a670d08f27642294321849ec0 + last_write_checksum: 
sha1:15f1b57b696b46bf6986c8f1a53d6bbf8d2351e2 + pristine_git_object: 5fd6c173ef29fb9bf2f570e0c2300268221e1ad3 src/mistralai/client/models/completionevent.py: id: c68817e7e190 last_write_checksum: sha1:dc43ac751e4e9d9006b548e4374a5ec44729eea4 pristine_git_object: 3b90ab0c1ecac12f90e0ae3946a6b61410247e4f - src/mistralai/client/models/completionftmodelout.py: - id: 0f5277833b3e - last_write_checksum: sha1:1c83e1d0a868eef32792844d787c5aaede0386b8 - pristine_git_object: 7ecbf54aabf022392e6d2ce2d0a354b9326eec79 - src/mistralai/client/models/completionjobout.py: - id: 712e6c524f9a - last_write_checksum: sha1:2c8500593b8f9257a0a389f87792cd174fcd7209 - pristine_git_object: 42e5f6c65809aaaa02f0bf58fbf031f4c476208b + src/mistralai/client/models/completionfinetunedmodel.py: + id: f08c10d149f5 + last_write_checksum: sha1:5fbd8c5475c250cbed1c2d2f47de372e8e92b128 + pristine_git_object: 54a1c1656aea1954288e9144670c939e29a83c47 + src/mistralai/client/models/completionfinetuningjob.py: + id: c242237efe9b + last_write_checksum: sha1:e4352be2411c7026c054a6fe380b87242183d4e4 + pristine_git_object: 1bf0a730c389be30bac2acfa17ffc6b5891e4918 + src/mistralai/client/models/completionfinetuningjobdetails.py: + id: e8379265af48 + last_write_checksum: sha1:b11c9bdc161da6a5cbd9f35f4bc5b51f0f3cea9c + pristine_git_object: cb7870219b261e260feceb6109088b0bbf8a6408 src/mistralai/client/models/completionresponsestreamchoice.py: id: 5969a6bc07f3 - last_write_checksum: sha1:874d3553d4010a8b83484588dcbf9136bd8c6537 - pristine_git_object: 119a9690727ae296acf72dcfafdd224a61582599 + last_write_checksum: sha1:59730cdaeeb3e95f4d38f63c34a4e491f40e6010 + pristine_git_object: a52ae892fcaafe54918160d055ee2badac31404e src/mistralai/client/models/completiontrainingparameters.py: id: be202ea0d5a6 - last_write_checksum: sha1:fd9a12417cd4f7bdc1e70ba05bbfef23b411ddd0 - pristine_git_object: 4b846b1b9bbcc4f2c13306169b715f08241e8f1c - src/mistralai/client/models/completiontrainingparametersin.py: - id: 0df22b873b5f - 
last_write_checksum: sha1:a92e9df1d5be2a7f2d34b1dcde131e99e5ee351d - pristine_git_object: 20b74ad9fc0c50fe7d1d3dd97fcd3c296fbf7042 + last_write_checksum: sha1:1a797019770795edcd911ff5b3580bedb83c05f4 + pristine_git_object: ca50a7ad521b46f275dd3a39c98911f13ee527c8 src/mistralai/client/models/contentchunk.py: id: c007f5ee0325 - last_write_checksum: sha1:5cedb52346bc34cb30950496d34ab87d591b6110 - pristine_git_object: eff4b8c670f47f53785690415751be05284f3d8b + last_write_checksum: sha1:b921b03b4c1e300b0e3f51ea9eadd4d7c4b7a0ea + pristine_git_object: e3de7591a089a3739af17108cecdc2d4240f10bf src/mistralai/client/models/conversationappendrequest.py: id: 81ce529e0865 - last_write_checksum: sha1:83e883e4324d76d74521607390747ecdf7dffaa0 - pristine_git_object: 0f07475e4ca640ce50a6214fe59a91041a2e596a + last_write_checksum: sha1:bdae860241893ec3ab3f22bd57c45dede2927da3 + pristine_git_object: 386714fd6dcccff8abb2247d7474949d9e8e79f8 src/mistralai/client/models/conversationappendstreamrequest.py: id: 27ada745e6ad - last_write_checksum: sha1:12c3c63b763bd16398fcbec7d6fab41729ee81a6 - pristine_git_object: a0d46f727ff99d76a1bf26891df3b0ed80a88375 + last_write_checksum: sha1:0a563cb146c4806ee6a133d10e7af8839e6f38dd + pristine_git_object: 32f6b148c647d3bac8edada3b941c51c17d78901 src/mistralai/client/models/conversationevents.py: id: 8c8b08d853f6 - last_write_checksum: sha1:6362a88ae26cb67f7abc3d2b0963f9a869c15371 - pristine_git_object: f24760381501f822593ef5903df0d32ca3cf9b47 + last_write_checksum: sha1:2eedde1ecf31061fb13de0b1bdc9ea311897b570 + pristine_git_object: 17812983f3aee3e675d44f46ca1b741315c2139a src/mistralai/client/models/conversationhistory.py: id: 60a51ff1682b - last_write_checksum: sha1:6fa8bdd370239df879da7b687c037405a8fbbe25 - pristine_git_object: 92d6cbf90c9c76945ee79752d5b4232aea10a79d + last_write_checksum: sha1:8984a0b12766e350022796a44baf6aac4c93f79b + pristine_git_object: ceef115b70ff02da05ac97571a177edf5b5f6cf6 src/mistralai/client/models/conversationinputs.py: 
id: 711b769f2c40 last_write_checksum: sha1:5fc688af61d6a49ede9c9709069f3db79f4dc615 pristine_git_object: 7ce3ffc3772926a259d714b13bfc4ee4e518f8f7 src/mistralai/client/models/conversationmessages.py: id: 011c39501c26 - last_write_checksum: sha1:408e26cb45dc1bdf88b1864d365e636307920df3 - pristine_git_object: 1aa294a497d2eb27a12dcbcce36c7956f6ee4f4e + last_write_checksum: sha1:95e3abe55199f2118e6fb7e5d8520af6a929449a + pristine_git_object: 84664b62337dcdc408bb01e0494fa598e6a86832 src/mistralai/client/models/conversationrequest.py: id: 58e3ae67f149 - last_write_checksum: sha1:f1b0b2b6a9c9b94ed5e3a77fb0b92e695f421a2e - pristine_git_object: 2005be82d8ebcf8c8fa74074abf25f072e795582 + last_write_checksum: sha1:f7a67082e06c1789f4c6a4c56bfef5f21cce5034 + pristine_git_object: 83d599ebf984f1df2390d97dbe651881f7dee0e2 src/mistralai/client/models/conversationresponse.py: id: ad7a8472c7bf - last_write_checksum: sha1:8b625fe8808f239d6bc16ecf90ae1b7f42262c0c - pristine_git_object: 24598ef3fc24a61a0f15ab012aa211ba57cd0dcf + last_write_checksum: sha1:99148d75abcb18c91ba0a801174461346508f5fb + pristine_git_object: f6c10969a931eaf1a4667b0fcff3765f57658b15 src/mistralai/client/models/conversationrestartrequest.py: id: 681d90d50514 - last_write_checksum: sha1:0ce81536464db32422165c35252770f3197fb38e - pristine_git_object: 35d3099361274440552e14934b6a1b19ebc8f195 + last_write_checksum: sha1:99123cee7c54f44c02b56111305af399143b4e5a + pristine_git_object: 7ae16aff4de36a91093d3021b66283e657b00897 src/mistralai/client/models/conversationrestartstreamrequest.py: id: 521c2b5bfb2b - last_write_checksum: sha1:b996f57271f0c521113913f48b31d54c17d73769 - pristine_git_object: 0ddfb130d662d954c3daabdf063172b8ea18a153 + last_write_checksum: sha1:abfd14652b4785c36de84a59593b55f7a6a2d613 + pristine_git_object: 0e247261d997ac3d8ff0155ba54cc4cafe9ac65a src/mistralai/client/models/conversationstreamrequest.py: id: 58d633507527 - last_write_checksum: sha1:fc4f2f1578fbeb959ddbe681dee2d11f0a4e6c5e - 
pristine_git_object: 379a8f2859b5f40cc744ad8f9bc6c39a198258b5 + last_write_checksum: sha1:7dc25a12979f4082ed7d7e37584bb9c30297f196 + pristine_git_object: a20dccae1a60753ed95f59da0df78c204c19d515 + src/mistralai/client/models/conversationthinkchunk.py: + id: 77e59cde5c0f + last_write_checksum: sha1:5db067661a5d4b0c13db92ad93da1aab9e0e7a34 + pristine_git_object: e0e172e3edbe46c000e82e712c135b96a65312e9 src/mistralai/client/models/conversationusageinfo.py: id: 6685e3b50b50 - last_write_checksum: sha1:60f91812b9b574b3fade418cc7c2191253f6abbf - pristine_git_object: 98db0f1617bd7484750652997dcd43d08ef7c5fc - src/mistralai/client/models/createfinetuningjobop.py: - id: fd3c305df250 - last_write_checksum: sha1:e29ada8f733de44bfeab2885d2221ade84b34619 - pristine_git_object: f55deef5d9f6134fddb02c458a0d812759cea358 - src/mistralai/client/models/createorupdateagentaliasop.py: - id: a79cf28bda01 - last_write_checksum: sha1:d4f2790b5970c9cf30b3fcee9d8bc6d4b8c33778 - pristine_git_object: cde1dd054c447a8617527585e783a95affba3277 - src/mistralai/client/models/deleteagentaliasop.py: - id: e4d0d7f75b24 - last_write_checksum: sha1:66e34ba7fb1a238d55c7ed380bd666c8975c01b4 - pristine_git_object: c52d099e9c1f28bf37ee009833b5fb8e351ed987 - src/mistralai/client/models/deleteagentop.py: - id: 089fb7f87aea - last_write_checksum: sha1:a196bcc758e36ffeb17fab25bb60451d3d66a4d8 - pristine_git_object: 8b14bca7bf5d67e16181b67ef6b7375c1b0a93fd - src/mistralai/client/models/deleteconversationop.py: - id: 86fefc353db0 - last_write_checksum: sha1:48f33b614ec087fdaf2b29d9c3eefd6e8d7d311f - pristine_git_object: 39607f40640c6dfa3ef20d913a90abee602b9b4a - src/mistralai/client/models/deletedocumentop.py: - id: 62522db1ccf2 - last_write_checksum: sha1:1a4e2e72a0d3cd24e184ce3cc5037f5ec7cdd9a5 - pristine_git_object: 400070a49bc046d8132bfc7dfe3e114faa719001 - src/mistralai/client/models/deletefileop.py: - id: 286b4e583638 - last_write_checksum: sha1:2561c1fe03ec3915dfa48fa354a86a56ba9b54c4 - 
pristine_git_object: 4feb7812f8acfa366e4b46fc914925df4f705528 - src/mistralai/client/models/deletefileout.py: - id: 5578701e7327 - last_write_checksum: sha1:a34520be2271c1e37fa8b3c1bdead843db7b1bb9 - pristine_git_object: c721f32cfe752c2c084efb72db3e5409795e387a - src/mistralai/client/models/deletelibraryaccessop.py: - id: df80945bcf19 - last_write_checksum: sha1:065aad372e0bbfd998fe3adc3389e3dbc9d5b674 - pristine_git_object: ca14c3ffc43be3aee14d6aa1f4805f0483d8b676 - src/mistralai/client/models/deletelibraryop.py: - id: cd0ce9bf8d51 - last_write_checksum: sha1:07840cbdb741bba291f1db1a1b54daca99e8f7ea - pristine_git_object: 5eb6fc310aa62454e3f7ed0766212c807125fe8c - src/mistralai/client/models/deletemodelop.py: - id: 2c494d99a44d - last_write_checksum: sha1:97dce35d527e03612068896572824cc0f13269c1 - pristine_git_object: 55c4b2422336ef6e148eedbd4a6a60846d187e9b + last_write_checksum: sha1:3e0489836936a7a77fa3b41adde1eb459ecd176d + pristine_git_object: 1e80f89ee4f7a3d464df2bf39990b467029e86c1 + src/mistralai/client/models/createagentrequest.py: + id: 442629bd914b + last_write_checksum: sha1:273dde9338cc1eb166ee40f4c6215f90cae908ab + pristine_git_object: 54b09880eefe348d2e003ed1b238b67cb58b8e34 + src/mistralai/client/models/createbatchjobrequest.py: + id: 56e24cd24e98 + last_write_checksum: sha1:e648017622cd6e860cb15e5dd2b29bf9f2a00572 + pristine_git_object: 9a901fefee0ea6a825274af6fd0aa5775a61c521 + src/mistralai/client/models/createfileresponse.py: + id: fea5e4832dcc + last_write_checksum: sha1:b7f3ba95a09a3225eae80b53152fe2b7d3806fbe + pristine_git_object: 768212803bc3535ac8a27a9c0d48f147e3d536b7 + src/mistralai/client/models/createfinetuningjobrequest.py: + id: c60d2a45d66b + last_write_checksum: sha1:2e8e608140860bba9ecfa9498d61cf807f96680a + pristine_git_object: e328d944ce2a71ffbec027965d31075070647dbc + src/mistralai/client/models/createlibraryrequest.py: + id: 1c489bec2f53 + last_write_checksum: sha1:45fa65be82712ce99304027c88f953f0932bdae4 + 
pristine_git_object: 58874e014275b06ce19d145aaa34a48d11ca0950 + src/mistralai/client/models/delete_model_v1_models_model_id_deleteop.py: + id: 767aba526e43 + last_write_checksum: sha1:73568f2f450bf9c23aca3649372a92e1b9a2fc54 + pristine_git_object: 199614f53501f34088cb112d6fe1114e1e588d8a + src/mistralai/client/models/deletefileresponse.py: + id: 3ee464763a32 + last_write_checksum: sha1:2c0df66fc8c4384d50e54ac03577da3da2997cf5 + pristine_git_object: ffd0e0d015e38e5f6113da036ebeba98441444f4 src/mistralai/client/models/deletemodelout.py: id: ef6a1671c739 - last_write_checksum: sha1:4606683ef6da0aae7e88bc50144eddc83908f9d7 - pristine_git_object: bf22ed177ee91dce98bfd9b04f02e683c79e4860 + last_write_checksum: sha1:d67ac7c3fa143be40c74455c7206c94bfb5a2134 + pristine_git_object: fa0c20a419c59b8fc168c150b28d703398ea7f40 src/mistralai/client/models/deltamessage.py: id: 68f53d67a140 - last_write_checksum: sha1:ff7fa85086bd56863f7f4a255b008cfaa11a959c - pristine_git_object: fbb8231a310e90afd50951dd0f572ce3e0f029e6 + last_write_checksum: sha1:b18350de03a8685bea5ac52e1441415b5e58bdf4 + pristine_git_object: d9fa230e93d4e0886f21c836cf3813855eb8f9fd + src/mistralai/client/models/document.py: + id: fbbf7428328c + last_write_checksum: sha1:2a5a28c54f0aec50059b6badc1001b1cd120e7d3 + pristine_git_object: 31eebbd1a7d7fdcb498259837c533bfc8008a6f9 src/mistralai/client/models/documentlibrarytool.py: id: 3eb3c218f457 - last_write_checksum: sha1:e5bfb61a4a03a3b28837c27195f1bcd8cc14c6b2 - pristine_git_object: ff0f739391404604c1cc592c23507946aa0b693f - src/mistralai/client/models/documentout.py: - id: 7a85b9dca506 - last_write_checksum: sha1:f041a4866c67d1f81f62282918d625216a760355 - pristine_git_object: 3b1a5713c84512947a07d153792b17fcf3262dcb + last_write_checksum: sha1:d03a6136192b56778bd739d834d9bdc80a09cc23 + pristine_git_object: 642c3202b11c5bb8a2b41cf8ae0fe43f73aa2a81 src/mistralai/client/models/documenttextcontent.py: id: e730005e44cb last_write_checksum: 
sha1:c86f4b15e8fda1cd5c173da01462342cd22b7286 pristine_git_object: b6904cb4267347b62a457a01b91a391500326da9 - src/mistralai/client/models/documentupdatein.py: - id: d19c1b26a875 - last_write_checksum: sha1:bddd412de340d050cfbdd4206a9fbb3d1660a045 - pristine_git_object: 669554de5d33f6163c8d08fefee52c1869662eba src/mistralai/client/models/documenturlchunk.py: id: 4309807f6048 - last_write_checksum: sha1:186a684da48bb5d237769ecb3dbf1479a5c5ee55 - pristine_git_object: 304cde2b687e71b0d2fb0aee9b20826473375b25 - src/mistralai/client/models/downloadfileop.py: - id: 4d051f08057d - last_write_checksum: sha1:b80c5332cfdb043bb56f686e4e1c4bf26495b04b - pristine_git_object: fcdc01d644bdce8d1fc7896b5f8244a7a5311dfa + last_write_checksum: sha1:33cdaccb3a4f231730c7fa1db9f338a71e6311b2 + pristine_git_object: 43444d98b8b7fb430f9c33562c35072d9c79a263 src/mistralai/client/models/embeddingdtype.py: id: 77f9526a78df last_write_checksum: sha1:a4e2ce6d00e6d1db287a5d9f4254b0947227f337 pristine_git_object: 732c4ebe3678563ebcdbafd519f93317261586fb src/mistralai/client/models/embeddingrequest.py: id: eadbe3f9040c - last_write_checksum: sha1:6071612944c4c603803cc7f2adc1e9784549c70f - pristine_git_object: f4537ffa9bdc0a9a73101e1b1524fed1a09c1a65 + last_write_checksum: sha1:e36282eb015b782804b4bdf3d18b596607b020fd + pristine_git_object: 15950590fec8b82a4fb28d69009a6f6cfb83c9ee src/mistralai/client/models/embeddingresponse.py: id: f7d790e84b65 last_write_checksum: sha1:9bb53a5a860c8e10d4d504648d84da73068c0a83 pristine_git_object: 6ffd68941f32f396998df9dded14ff8365926608 src/mistralai/client/models/embeddingresponsedata.py: id: 6d6ead6f3803 - last_write_checksum: sha1:3e2430e6bd9b3c77a564f4e56edec1274446a1f4 - pristine_git_object: a689b290d5a4b360e409413c96bb5e7288ce2e2e + last_write_checksum: sha1:ba5f38ee6e2b0436532229da01ba79ee49c20d12 + pristine_git_object: 098cfae06eae6a92830b4b5a26985f5d5950e512 src/mistralai/client/models/encodingformat.py: id: b51ec296cc92 last_write_checksum: 
sha1:ea907f86b00323d99df37f7ff45d582aace798e7 @@ -2156,262 +2112,298 @@ trackedFiles: id: 62d6a6a13288 last_write_checksum: sha1:015e2db9e8e5a3e4ce58442ccedaf86c66239dde pristine_git_object: 56d82cbed237f32a8b00cfee4042dfe3e7053bcb - src/mistralai/client/models/eventout.py: - id: da8ad645a9cb - last_write_checksum: sha1:67f7cc29102a971d33b6cbbcb06ffcfe595227a5 - pristine_git_object: a0247555bb816061cb22f882406c11c3a9011818 + src/mistralai/client/models/event.py: + id: e5a68ac2dd57 + last_write_checksum: sha1:8ed848fe2e74c7f18ee8f4dcba39ad1c951c16d2 + pristine_git_object: c40ae2b1a1b8131a90c637e3268872b97b22683e src/mistralai/client/models/file.py: id: f972c39edfcf - last_write_checksum: sha1:8d0adce8f4dfc676f6da6465547a0d187d4326f1 - pristine_git_object: dbbc00b50e5578230daefa47648954ead8ed8eb9 + last_write_checksum: sha1:609381a40a4bfdda2e7e750a848cd2bb38d6ac0f + pristine_git_object: 1b0ea1d4a288d9723dcdd7cfda99d49c5cbd9e7c src/mistralai/client/models/filechunk.py: id: ff3c2d33ab1e - last_write_checksum: sha1:9f970ef8366df8087f9332a4b1986540063a1949 - pristine_git_object: 43ef22f861e0a275c7348133d0c4d04551477646 + last_write_checksum: sha1:d7561c39252b81007a8e079edb4f23989ffd510e + pristine_git_object: 5c8d2646dc0d5c732828bdd81c5a58e12fa92a42 src/mistralai/client/models/filepurpose.py: id: a11e7f9f2d45 last_write_checksum: sha1:8b167c02f9f33e32d5fd1c6de894693924f4d940 pristine_git_object: 49a5568ff82ad4a85e15c8de911e8d6c98dcd396 + src/mistralai/client/models/files_api_routes_delete_fileop.py: + id: 2f385cc6138f + last_write_checksum: sha1:ccfd3ff64635cfd511f49c5e02a6f1860c479966 + pristine_git_object: eaba274b9dd94d6cf729325316b3e3e9b3834566 + src/mistralai/client/models/files_api_routes_download_fileop.py: + id: 8184ee3577c3 + last_write_checksum: sha1:81058ede2a5eb333b54561f99ed7878082c0f411 + pristine_git_object: 83de8e73a3d50917e4a41bb92a828a10e646a632 + src/mistralai/client/models/files_api_routes_get_signed_urlop.py: + id: 0a1a18c6431e + last_write_checksum: 
sha1:ef4908b9d2e43c0256d25a5aa533c5bdc1205113 + pristine_git_object: 64cd6ac57b4f2de70403e11062307a8d8d5d94e7 + src/mistralai/client/models/files_api_routes_list_filesop.py: + id: b2e92f2a29b4 + last_write_checksum: sha1:71e67fc63f0df28c534d4bd03a6464ae88959dc2 + pristine_git_object: b03e2f886ce02d4beabca150302a924ae63ad507 + src/mistralai/client/models/files_api_routes_retrieve_fileop.py: + id: 5d5dbb8d5f7a + last_write_checksum: sha1:d451d8d2b32f412158a074919cca1a72f79940cb + pristine_git_object: 5f8de05f1bba07517dc2ee33a4f05122503b54b5 + src/mistralai/client/models/files_api_routes_upload_fileop.py: + id: f13b84de6fa7 + last_write_checksum: sha1:d38a86b9e7d338278e14c68756654d85bc330070 + pristine_git_object: 54ff4e4951a58e13993be0f5d2c16b0cb11c0978 src/mistralai/client/models/fileschema.py: id: 19cde41ca32a - last_write_checksum: sha1:245115d1f955324bce2eeb3220bdaa6906b28e92 - pristine_git_object: cbe9b0d17ad15ce02e9fd973fe49666885c6ff92 - src/mistralai/client/models/filesignedurl.py: - id: a1754c725163 - last_write_checksum: sha1:5d981b1743aa2d84818597b41a5f357b4256e9e0 - pristine_git_object: 53dff812ffe5c5859794424d49f8bd7f735cf3b0 + last_write_checksum: sha1:0b3acb889a2c70998da4076e2f4eef3698e8b117 + pristine_git_object: e99066a9eb19daebcf29f356225635a297c444e1 src/mistralai/client/models/fimcompletionrequest.py: id: cf3558adc3ab - last_write_checksum: sha1:db51cde0b13bb373097f2c158b665ccb3c5789f4 - pristine_git_object: e2f6032784c996d18c100b8b2cde4bb4432af884 + last_write_checksum: sha1:20bca1f6a0ab6e84f48b6e332f0c3242da84ae45 + pristine_git_object: ea877213d1abe4811fee188eb7a60ccf1bb51f18 src/mistralai/client/models/fimcompletionresponse.py: id: b860d2ba771e last_write_checksum: sha1:dffd5a7005999340f57eaa94e17b2c82ddc7fd90 pristine_git_object: 1345a116b7855ab4b824cf0369c0a5281e44ea97 src/mistralai/client/models/fimcompletionstreamrequest.py: id: 1d1ee09f1913 - last_write_checksum: sha1:df973050b942b844280bf98f0a3abc90bd144bbb - pristine_git_object: 
480ed17ab006e7afa321a91c5ccebd6380f8f60c + last_write_checksum: sha1:aa8313ecdd852034aaf6ec23dc3f04f7ef8e28e5 + pristine_git_object: e80efc095feb2e2df87f6d3c3f9c56b6cbf347b3 src/mistralai/client/models/finetuneablemodeltype.py: id: 05e097395df3 last_write_checksum: sha1:daf4cd1869da582981023dea1074268da071e16a pristine_git_object: 7b924bd7abc596f0607a513eee30e98cbf7ab57a + src/mistralai/client/models/finetunedmodelcapabilities.py: + id: 475c805eab95 + last_write_checksum: sha1:5919e48a6778f1a2360ce090d05b41b1bf33253f + pristine_git_object: 2f4cca0b8c0e3e379f5c2aa67953f2e55757f68d src/mistralai/client/models/ftclassifierlossfunction.py: id: d21e2a36ab1f last_write_checksum: sha1:ca90e2f1cd0b9054293bea304be0867c93f7fac2 pristine_git_object: ccb0f21b5a69f91119bec9db6e9f3d876e4c35af - src/mistralai/client/models/ftmodelcapabilitiesout.py: - id: f70517be97d4 - last_write_checksum: sha1:2bc7700ad89b7aab37fa02fcb6d9282bc252315e - pristine_git_object: 42269b785d9d5ad2257179f2c093c62637fb5dd6 src/mistralai/client/models/ftmodelcard.py: id: c4f15eed2ca2 - last_write_checksum: sha1:7441e4155beaa97cea47b6295017f567dd6eee1a - pristine_git_object: 570e95e2276b144e008e9ccf6a108faa1fc835f5 + last_write_checksum: sha1:b1b36ff994bcadd8c917880333627fd05976c991 + pristine_git_object: 2c26ff2f66faa55dc5a5a1743720e8f3f5d4d0f1 src/mistralai/client/models/function.py: id: 32275a9d8fee - last_write_checksum: sha1:356a2c6c9d2437e60036a9b3d1a3d154302363c8 - pristine_git_object: 3632c1afb40aebab0795f754814036e04c251469 + last_write_checksum: sha1:ca24a512de22787932d7f4af005699621926d6c0 + pristine_git_object: 1da1dcc9b637d0a5b0fbb7cf2761f6d01eb3068f src/mistralai/client/models/functioncall.py: id: 393fca552632 last_write_checksum: sha1:6e96e9abaa9b7625a9a30e376c31b596ee9defcb pristine_git_object: 527c3ad408e1e1ccfe6301a8860e7f751e1d312d src/mistralai/client/models/functioncallentry.py: id: cd058446c0aa - last_write_checksum: sha1:6ece3816c50bd04b908743ad62e2dc71d815842a - pristine_git_object: 
6ada1d358641a23bc83b93f222eeff659a124b34 + last_write_checksum: sha1:776f397d17f946bae2929998f14d991a1ccc99e0 + pristine_git_object: d05fad856729a76dd24f8aa4d050f8381e51ed6a src/mistralai/client/models/functioncallentryarguments.py: id: 3df3767a7b93 last_write_checksum: sha1:9858feba8f7f01017f10477a77dec851a1d06e55 pristine_git_object: afe81b24e131a8ef879ee7f140271aa762b8ed2f src/mistralai/client/models/functioncallevent.py: id: 23b120b8f122 - last_write_checksum: sha1:cb63fb3cfb4debfca7b207b49e592566619f84b1 - pristine_git_object: 5d871a0e0f15cc27afe3c861f387609aa9a8a17f + last_write_checksum: sha1:62b5b94df4e5b6f945ead78871cdbfceb6cd40cf + pristine_git_object: 849eed76d08524e5e4d1e7cc1c3fa04386f5ef75 src/mistralai/client/models/functionname.py: id: 000acafdb0c0 last_write_checksum: sha1:4145b7b817b712b85dcbedb309416c7ba72d827e pristine_git_object: 07d98a0e65ccbcba330fb39c7f23e26d3ffc833c src/mistralai/client/models/functionresultentry.py: id: 213df39bd5e6 - last_write_checksum: sha1:04a8fd7396777c412fa9c73c0bef148b2ab53cb2 - pristine_git_object: ca73cbb7481fe0e97b354e9abe5ef6034f10bd98 + last_write_checksum: sha1:3aa6834bf2beda061ac772a0a8a4d7ed5ad942a0 + pristine_git_object: 01e2e36fc0a9de6a2b06a4205004992baf0f9e43 src/mistralai/client/models/functiontool.py: id: 2e9ef5800117 - last_write_checksum: sha1:5c4ea61a1bccd87e1aae06bfa728c29a4ec60c54 - pristine_git_object: 13b0449687f64848cb2f2fdf792f148f9e3cfed9 - src/mistralai/client/models/getagentop.py: - id: 5a28bb1e727e - last_write_checksum: sha1:50a681253a1075f1268a269cd67154efa35dff6a - pristine_git_object: 55d8fe6860fa4c868c4d6d5d5d2ce4571e9071b4 - src/mistralai/client/models/getagentversionop.py: - id: a0db5a6aab1f - last_write_checksum: sha1:d1dfc0927abcae22460838902d1f5ddc2a224856 - pristine_git_object: 77b8a2662939e03b261f713aa7d9676746a4df1e - src/mistralai/client/models/getbatchjobop.py: - id: 443103fe3b88 - last_write_checksum: sha1:3a7f9656f3d169c60f0d3f16b00c4136d193468e - pristine_git_object: 
792c3e2121902734094a7224c8605109fc697f44 - src/mistralai/client/models/getconversationhistoryop.py: - id: c863a4cbeb34 - last_write_checksum: sha1:4e04b4550c7b48635eca1943bcfee64027f0e7ca - pristine_git_object: c1fbf3de4ee966fffa2400a9c109d952b26543da - src/mistralai/client/models/getconversationmessagesop.py: - id: bb8a90ba7c22 - last_write_checksum: sha1:1b7aad5c74338aeecb11de44d8378aaa75498e37 - pristine_git_object: 6666198edce05a99c55f1c35f26f6d3b548c9b0d - src/mistralai/client/models/getconversationop.py: - id: 1a622b8337ac - last_write_checksum: sha1:4665e81fae4f12fabc09629f32d28c1c2de2bcf2 - pristine_git_object: d204d1755b4dc23ba8397ad24fec30bd064eacce - src/mistralai/client/models/getdocumentextractedtextsignedurlop.py: - id: 69099395d631 - last_write_checksum: sha1:f6d5e8499a314e903301e419fb206c33644363ff - pristine_git_object: 9a71181d3abd625643e741c562fe73f25bf12932 - src/mistralai/client/models/getdocumentop.py: - id: de89ff93d373 - last_write_checksum: sha1:4d1f358dfe3b44ccd2a88aea6730fbaf4b5f1d93 - pristine_git_object: d7b07db791a3adb3992475f0cf49c3fe01007ad9 - src/mistralai/client/models/getdocumentsignedurlop.py: - id: b8d95511c6d1 - last_write_checksum: sha1:255a0b505d558db3149652822718c7bcecc706e8 - pristine_git_object: e5d56c54c1ffc3529a8d1cf013bcb3327392b269 - src/mistralai/client/models/getdocumentstatusop.py: - id: f1f40b8f003f - last_write_checksum: sha1:c442daff8adb3db0ac58b03e54b7c05c82b202a9 - pristine_git_object: 4206f593ca58650f9df17b377b67c374a1b0d883 - src/mistralai/client/models/getdocumenttextcontentop.py: - id: ba23717093ef - last_write_checksum: sha1:33f047af38e4be2b71f4d90a36614ea7ab096a28 - pristine_git_object: 8a7b4aae025bbcb5ade5d4d36f2bb5e34cbb315e - src/mistralai/client/models/getfilesignedurlop.py: - id: 1aa50b81c8cf - last_write_checksum: sha1:a8fb95f119d173dd1d7afed02597a297dbbc7a89 - pristine_git_object: 06ed79eea058d4ebffc5d0b87ae2d06a32f4755a - src/mistralai/client/models/getfinetuningjobop.py: - id: afe997f96d69 - 
last_write_checksum: sha1:25db6d0d336a78189b603bbce16b0e0de84a33f1 - pristine_git_object: 1fb732f48a1a4c2993185a6a272879a83c80dc06 - src/mistralai/client/models/getlibraryop.py: - id: c84a92e23a90 - last_write_checksum: sha1:d51c0cf40a6ed398b0cb7078fe897d047b55e251 - pristine_git_object: bc0b4a238b146c6e5853e0b9d3031a876f30bc17 + last_write_checksum: sha1:bce744d77a3dac92d4776a37be497311674bdc7d + pristine_git_object: eae872643c85115a825c2feda11d9a6c12a06b99 + src/mistralai/client/models/getfileresponse.py: + id: 81919086e371 + last_write_checksum: sha1:fc0232e54c0de355058c5bd82e424953b1659b56 + pristine_git_object: f625c153799dcd38e4990504d48371112b65cd15 + src/mistralai/client/models/getsignedurlresponse.py: + id: cee4e4197372 + last_write_checksum: sha1:ab9adbc06e7f02e791dc549ad1850ce1b1a250a7 + pristine_git_object: 4ba95894f2b89719fa58e7e397c28014dbd00316 + src/mistralai/client/models/githubrepository.py: + id: 4bc83ce18378 + last_write_checksum: sha1:21aa04bc426158ccbe1ded3bc65b46e6869e897d + pristine_git_object: 84b01078c2192de5d6668a6943d416a2ff30db5f src/mistralai/client/models/githubrepositoryin.py: id: eef26fbd2876 - last_write_checksum: sha1:cc98805951c3f80d9b8f0ba4037cf451551b0742 - pristine_git_object: e55389c380416f69ed7dc085cbbaaba056c4d1ba - src/mistralai/client/models/githubrepositoryout.py: - id: d2434a167623 - last_write_checksum: sha1:76d98ac7613e626599cb4c7a0b0366e9b20815ff - pristine_git_object: 514df01c217b40d8c050839ac40b938c68ef1bf6 - src/mistralai/client/models/httpvalidationerror.py: - id: 4099f568a6f8 - last_write_checksum: sha1:be2db0d4ec07da0ddb37878761545c3dde8fb8ec - pristine_git_object: e7f0a35bf208c32086c7b448273d1133d0f1027b + last_write_checksum: sha1:18bd07155fff4b99d114353fee95e6bd828aeacd + pristine_git_object: 38bcc2087630f2fd4e9e5fa149449c32e21fdb07 + src/mistralai/client/models/imagedetail.py: + id: c1084b549abb + last_write_checksum: sha1:375db5c8fa87712dc37e46d0bf72283ae6cd6400 + pristine_git_object: 
1982d357277a92fc7ebea3b99146116596d99c78 src/mistralai/client/models/imagegenerationtool.py: id: e1532275faa0 - last_write_checksum: sha1:85122792c3ba324086096345119fedf326f55c86 - pristine_git_object: 680c6ce2d08277e65e23ea3060e83c1fa4accb78 + last_write_checksum: sha1:88a1347876f69960dc33f8e2cb9929ab1a90a224 + pristine_git_object: c1789b18028156ae683d0323e65e47a43694570f src/mistralai/client/models/imageurl.py: id: e4bbf5881fbf - last_write_checksum: sha1:9af5cff0b3a2c1c63e2bd1f998dcfeab273fd206 - pristine_git_object: 4ff13b1ccbc157f21013aacd7a062e89a26dcbf9 + last_write_checksum: sha1:28ef2509fdb489ecf379b60e883e6957aebd2797 + pristine_git_object: ac1030f5d61144e393b2aa9f3ffea893faabb1f7 src/mistralai/client/models/imageurlchunk.py: id: 746fde62f637 - last_write_checksum: sha1:57e48972720a3e317291250d6d94c44d295b69f5 - pristine_git_object: 993185cce833c59ad341b977cf9263654951fa03 + last_write_checksum: sha1:0ac388d25cae5348ffb3821706c3a8b64e716ff5 + pristine_git_object: 7134b46e7428cee52eda859cb78387c99f7e1f5a src/mistralai/client/models/inputentries.py: id: 44727997dacb - last_write_checksum: sha1:44ef8e75dd43b82276a0f06ef5c6be9eed46b379 - pristine_git_object: dc9892956f0e2583c51bf49ef89adbd22b8646d5 + last_write_checksum: sha1:9e2a776be59c5043ea4179a60ac082faf064cc3d + pristine_git_object: e2da5a80aea121d18e2232f302ad73f63b4fc050 src/mistralai/client/models/inputs.py: id: 84a8007518c7 - last_write_checksum: sha1:871491fa3b24315bc1bddf371334381f75ab035d - pristine_git_object: cfcdeb3d5895ccb34512c2a0a2e799e763e09c09 + last_write_checksum: sha1:d067587b5395529fbd638741f20b80edb2848e39 + pristine_git_object: 9ecd7f484ea306b91a9ebf038a0addd80ccd57c4 src/mistralai/client/models/instructrequest.py: id: 6d3ad9f896c7 last_write_checksum: sha1:b56a77442b50b50151adedaa5ec356dc96c56428 pristine_git_object: e5f9cccf174d8e73c42e8ee4aa294b43e1ad6cf5 - src/mistralai/client/models/jobin.py: - id: f4d176123ccc - last_write_checksum: 
sha1:478a9beaf1c5ada536f5c333a47aa2ac0900bd16 - pristine_git_object: b3cb8998b5b0ce00414e40643eb3e259b2c0aabf - src/mistralai/client/models/jobmetadataout.py: - id: 805f41e3292a - last_write_checksum: sha1:1333181d5a3dff43076095f61e1d57f37085abbe - pristine_git_object: 1d386539d8c638d96b8f468cfca3241dfc07a9f3 - src/mistralai/client/models/jobsout.py: - id: 22e91e9631a9 - last_write_checksum: sha1:e9434f43df7df8e991eb0387eabcf308cae3cb65 - pristine_git_object: a4127a5d835c0f0ead04980f05cb293e18970905 + src/mistralai/client/models/jobmetadata.py: + id: cfbdde7fc0a2 + last_write_checksum: sha1:e1b180a47ca888d0fd4cbc34b62000d3ac86c2b5 + pristine_git_object: f6e96fa104e7a6c8ce9a94538a3d00167a2ae341 + src/mistralai/client/models/jobs_api_routes_batch_cancel_batch_jobop.py: + id: b56cb6c17c95 + last_write_checksum: sha1:21b5794f110c53691654d7195201f9a4b7793f21 + pristine_git_object: de2e63472ac53809cfeae200bd7d2f3dcbb70034 + src/mistralai/client/models/jobs_api_routes_batch_get_batch_jobop.py: + id: 36b5a6b3ceee + last_write_checksum: sha1:b41862f037d74bbdc44fb4df5f65cd402a16703b + pristine_git_object: d779e1d96c359b0d548d5dee17c06ae2a505cf47 + src/mistralai/client/models/jobs_api_routes_batch_get_batch_jobsop.py: + id: d8f0af99c94d + last_write_checksum: sha1:a50885f97cfd4d38bc3e3b0746c88bd602b88f94 + pristine_git_object: 89ac3c933347497b6fb1ec26fecb485802ef85fc + src/mistralai/client/models/jobs_api_routes_fine_tuning_archive_fine_tuned_modelop.py: + id: 34f89d2af0ec + last_write_checksum: sha1:3d5242f757ee9be10963af9cd5d47824fc83c71a + pristine_git_object: 9fa99837dda7e9413d3a05822cd17107c5fae51d + src/mistralai/client/models/jobs_api_routes_fine_tuning_cancel_fine_tuning_jobop.py: + id: d175c6e32ecb + last_write_checksum: sha1:515b7737cf8262243ee6175e297714125f3962bc + pristine_git_object: 56fa534044522f27fb26ef4820d10f22752134ea + src/mistralai/client/models/jobs_api_routes_fine_tuning_create_fine_tuning_jobop.py: + id: 81651291187a + last_write_checksum: 
sha1:19a0707e2f73b0184959d7c710a170650fa1767a + pristine_git_object: db857f7d6cc77057491e4b968798f730228b09bc + src/mistralai/client/models/jobs_api_routes_fine_tuning_get_fine_tuning_jobop.py: + id: d910fd8fe2d6 + last_write_checksum: sha1:52704f01d7388a8b62d59b6f7cd94fcb7d067ebf + pristine_git_object: ddd9c1891356a7c272e0244a9aea3d3d6b2d00d6 + src/mistralai/client/models/jobs_api_routes_fine_tuning_get_fine_tuning_jobsop.py: + id: cf43028824bf + last_write_checksum: sha1:36082bde6f3d932c66178729533e2a69040fdeab + pristine_git_object: ec80a158f45061b122f84ebaff89ae82ef8d98ef + src/mistralai/client/models/jobs_api_routes_fine_tuning_start_fine_tuning_jobop.py: + id: e7ff4a4a4edb + last_write_checksum: sha1:8cbfc309c09df806ad7d130004b4e1c2b89ede0a + pristine_git_object: cd25fa04f29dd544f01f3620b31d1c54c86addbb + src/mistralai/client/models/jobs_api_routes_fine_tuning_unarchive_fine_tuned_modelop.py: + id: 7cc1c80335a9 + last_write_checksum: sha1:f66c16423155066b844f8e89446d2acbb6e68157 + pristine_git_object: fd01fe6948613b0fffef9ac76cf1a0f9011ec5af + src/mistralai/client/models/jobs_api_routes_fine_tuning_update_fine_tuned_modelop.py: + id: 6d9dc624aafd + last_write_checksum: sha1:fbacb171b9c75f1fe45406f542a958d10c15fae2 + pristine_git_object: 296070b426900305fe4596f03a3c9f081cdb2dcf src/mistralai/client/models/jsonschema.py: id: e1fc1d8a434a - last_write_checksum: sha1:6711508e9c1bd20fc8b1bfdbd1181ca29144ef0d - pristine_git_object: 948c94ed8fe8102a9cdced68fde6be03489f5778 - src/mistralai/client/models/legacyjobmetadataout.py: - id: 4f44aa38c864 - last_write_checksum: sha1:e93d512c8cb6e0812248a195ff869428209cd71f - pristine_git_object: 4453c15798f4fd4db2de64e0beaf7ad557d82fa1 - src/mistralai/client/models/libraryin.py: - id: 6147d5df71d9 - last_write_checksum: sha1:34c5c9582a488fe87da084e74316e0fd76aa28d1 - pristine_git_object: 1a71d410d997a6d3f197947f821117e0605517af - src/mistralai/client/models/libraryinupdate.py: - id: 300a6bb02e6e - last_write_checksum: 
sha1:c9b1a0a00d31fa839df12353f1a3ee9d0b3ffb60 - pristine_git_object: 328b2de3cd4e304fd462882eca7226e460b7c4a7 - src/mistralai/client/models/libraryout.py: - id: 4e608c7aafc4 - last_write_checksum: sha1:9841adb596398554dfcaeb35b7e5a0572c541cff - pristine_git_object: c7ab7b8d39b68b5998c4874f9942caa275cf65d9 - src/mistralai/client/models/listagentaliasesop.py: - id: ff038766a902 - last_write_checksum: sha1:eef4e471999d5df5195aea51cde027b55567aeef - pristine_git_object: 83c6d1769c10fe38402a36b6aff2a18da61f4504 - src/mistralai/client/models/listagentsop.py: - id: a573a873c404 - last_write_checksum: sha1:db3c9e6ddc146138ed971f9970d9a164c0f97456 - pristine_git_object: 863fc13af1429bd1a6c02a9a20d2b6cb0cad7b34 - src/mistralai/client/models/listagentversionsop.py: - id: ccc5fb48e78f - last_write_checksum: sha1:0f2306bcceba2a2d7bfeb0be33126514d9287f17 - pristine_git_object: 613d3d8516690e6cba15922dfe69bdf62c039b01 - src/mistralai/client/models/listbatchjobsop.py: - id: f49af453f5e6 - last_write_checksum: sha1:e48b0e7371ee8f637e4fd6bed140cdbb1d405a7d - pristine_git_object: 5322df816e391a5569afcfd14edaeb128467a176 - src/mistralai/client/models/listconversationsop.py: - id: d6007f6c1643 - last_write_checksum: sha1:ece12b550abe6e17eb79f7a05593a93ea055f3f6 - pristine_git_object: 1c9a347c0ad4801c3a1b941e6328061d23d7dcd5 - src/mistralai/client/models/listdocumentout.py: - id: b2c96075ce00 - last_write_checksum: sha1:fc3eca772d1e32938ea1bd2f3e98cdea5f1003f3 - pristine_git_object: a636b3deff66fe4277a63c04fc7dd6c5e74e58e7 - src/mistralai/client/models/listdocumentsop.py: - id: 3e42bdc15383 - last_write_checksum: sha1:d9beade6d8bb8050a67e32c2a73926b140015e68 - pristine_git_object: 0f7c4584d793c7e692a4bbc6678e18549b0e0364 - src/mistralai/client/models/listfilesop.py: - id: e5bd46ac0145 - last_write_checksum: sha1:3e0bc8a7318ffd1c3fe15f335ea2bc1e18c714a1 - pristine_git_object: a9af5c70c98adce56653ff01772fe5900530a36e - src/mistralai/client/models/listfilesout.py: - id: ae5fa21b141c - 
last_write_checksum: sha1:4bc8ef424beb41c75d9c6fa4e101d330a951a99f - pristine_git_object: 460822f71fe8b0fc6292b804dc2a9de29bff4ef5 - src/mistralai/client/models/listfinetuningjobsop.py: - id: b77fe203b929 - last_write_checksum: sha1:af98423b166930cd18a1d377ea688540f3364166 - pristine_git_object: 8712c3fa6ac24094532fdfc047561997ea34552f - src/mistralai/client/models/listlibraryaccessesop.py: - id: 581b332626b7 - last_write_checksum: sha1:0a6bd277a706d807d87d3f2a4f870cc6ba917928 - pristine_git_object: 2206310f301f6ea40f14a495f5f6c6b4e76dbbf7 - src/mistralai/client/models/listlibraryout.py: - id: cb78c529e763 - last_write_checksum: sha1:3cd81fd6f6d2421c6b6d06077f0bf1d5b3c96cad - pristine_git_object: 39fa459f7cc7be17c751025287d7827c9d141aac + last_write_checksum: sha1:d01507ab0a1f6067cbc65aaba199de340ccc68aa + pristine_git_object: dfababa694305c96f98ddebf2f09e448e737c855 + src/mistralai/client/models/legacyjobmetadata.py: + id: 0330b8930f65 + last_write_checksum: sha1:3c2f669a05cc01227f62d6a8da1840d9c458d52f + pristine_git_object: 5757675895b3c56d8aa7c174deb08567e596ecf8 + src/mistralai/client/models/libraries_delete_v1op.py: + id: b2e8bbd19baa + last_write_checksum: sha1:ba41496bc99040f7598659c5b037b955b7f6d385 + pristine_git_object: 893ab53b11672edd9cde175e68a80d89ff949cb6 + src/mistralai/client/models/libraries_documents_delete_v1op.py: + id: 81eb34382a3d + last_write_checksum: sha1:66d1c6ec5e2535b0db72a3beac65b25a1f2336d7 + pristine_git_object: 0495832efba33314f3cd28fe62759c6dac5ca706 + src/mistralai/client/models/libraries_documents_get_extracted_text_signed_url_v1op.py: + id: a7417ebd6040 + last_write_checksum: sha1:030ca9fb7e10396e6b743ee644fe1a734e1df1f0 + pristine_git_object: 186baaed8346d106272fea2e4826587634b061bc + src/mistralai/client/models/libraries_documents_get_signed_url_v1op.py: + id: d4b7b47913ba + last_write_checksum: sha1:fdad7a6d3ae9a9c69009caf8207b284835675a9a + pristine_git_object: ebcf85d77ed6982d510ae95a6971e1d4b3ad56ca + 
src/mistralai/client/models/libraries_documents_get_status_v1op.py: + id: f314f73e909c + last_write_checksum: sha1:11d463eb328a1133658e8ff92340edc7f75923e4 + pristine_git_object: 1f4847874cdeff26caaf5fd16e0f8382834ecb2b + src/mistralai/client/models/libraries_documents_get_text_content_v1op.py: + id: 1ca4e0c41321 + last_write_checksum: sha1:26133a83bf0ef063c78069da1bbb96d58f44f30c + pristine_git_object: e0508d66fce682ed20a029604897137940689327 + src/mistralai/client/models/libraries_documents_get_v1op.py: + id: 26ff35f0c69d + last_write_checksum: sha1:e87e56e8fb9f7c11d61c805362db4755a81186b9 + pristine_git_object: 857dfbe60c57af8b0fa6655a049ed336d70fb941 + src/mistralai/client/models/libraries_documents_list_v1op.py: + id: 756f26de3cbe + last_write_checksum: sha1:5a1a9e025dc7a1fedaa5199d396a73c4986d4113 + pristine_git_object: da7d793b65139a3894b077a9665b392e8a44e8a2 + src/mistralai/client/models/libraries_documents_reprocess_v1op.py: + id: dbbeb02fc336 + last_write_checksum: sha1:bd5013cb1581dd13642ce7edf1e701f5b0c967c4 + pristine_git_object: a2f9ba2a0465fb3a8eb3b9afbb41d813de348656 + src/mistralai/client/models/libraries_documents_update_v1op.py: + id: 734ba6c19f5f + last_write_checksum: sha1:e12ca003680ff17523fe09438cd8f71d00ea081e + pristine_git_object: 7ad4231f72901b675d9af67c33364592c86be5ab + src/mistralai/client/models/libraries_documents_upload_v1op.py: + id: "744466971862" + last_write_checksum: sha1:9691ac41ecf986c9ccfad81423d367f96b10f4b7 + pristine_git_object: 388633d1c7e906803b711ef2bbf37656624515a9 + src/mistralai/client/models/libraries_get_v1op.py: + id: d493f39e7ebb + last_write_checksum: sha1:25b3c2c1040cd73ebd6b988b8b27708831affefd + pristine_git_object: 7a51d6053aa2cf2e6524a80487fe9549eec3dfa1 + src/mistralai/client/models/libraries_share_create_v1op.py: + id: feaacfd46dd3 + last_write_checksum: sha1:72e07fb60edbe1989865ba2ac90349edeb183f7e + pristine_git_object: 00ea74824b2efc4150d2e547e2eee416e5f6f2ee + 
src/mistralai/client/models/libraries_share_delete_v1op.py: + id: 7f3a679ca384 + last_write_checksum: sha1:897857c11cf0c14a0a81ef122dec4395dc16c0ce + pristine_git_object: eca3f86a6135e702f8cb6412a5f215dac2335a8f + src/mistralai/client/models/libraries_share_list_v1op.py: + id: 8f0af379bf1c + last_write_checksum: sha1:d27e0360c504576c315350fc226d371da455a598 + pristine_git_object: 895a259059283a17cc7558e3cc03022e2d4dd259 + src/mistralai/client/models/libraries_update_v1op.py: + id: 92c8d4132252 + last_write_checksum: sha1:a252f68e65cdb47e27d7059f256381daf2847344 + pristine_git_object: 54b0ab708c665ccb841b1c8d0f2748c390850506 + src/mistralai/client/models/library.py: + id: 028a34b08f9c + last_write_checksum: sha1:65f02f963a0540385681b88c7c7fba98d0d704f4 + pristine_git_object: 1953b6fbc6d7ad245ccacd9d665fb29853b00af7 + src/mistralai/client/models/listbatchjobsresponse.py: + id: 99d94c86a871 + last_write_checksum: sha1:7530be5f80a0756527be94758e800e8118e53210 + pristine_git_object: 35a348a1160dcf6d82d58c70cea07e11730359fb + src/mistralai/client/models/listdocumentsresponse.py: + id: f593d8e66833 + last_write_checksum: sha1:0d842168856056ff681b2a1c36b87df8e0d96570 + pristine_git_object: c48b8c051ad0d1fb4aed8396697e57e782be5a40 + src/mistralai/client/models/listfilesresponse.py: + id: 85d6d24c1a19 + last_write_checksum: sha1:caf901685bfb6f13d707b89726aaf6e5116cd054 + pristine_git_object: 10a60126600343033a4b0511d717cac6f1924b4d + src/mistralai/client/models/listfinetuningjobsresponse.py: + id: 118e05dbfbbd + last_write_checksum: sha1:f0582740a6777039e9695d97f072b5a3c34b483e + pristine_git_object: 1e434c5986bf577e2b42cca943cc6896a83d1fa2 + src/mistralai/client/models/listlibrariesresponse.py: + id: df556a618365 + last_write_checksum: sha1:55afb46b1fa797bc46574e5256cd063574c6fcbf + pristine_git_object: 337fe105731d8f3ced1f8f1299ff4081b9d5bfbe src/mistralai/client/models/listsharingout.py: id: ee708a7ccdad last_write_checksum: sha1:18e6501b00a566121dfd6a1ce7b0e23fef297e45 
@@ -2422,208 +2414,192 @@ trackedFiles: pristine_git_object: a95098e01843fe3b4087319881967dc42c6e4fef src/mistralai/client/models/messageinputcontentchunks.py: id: 01025c12866a - last_write_checksum: sha1:9eab6d7734dcd4bf9da5222c1927f5f40ef45db0 - pristine_git_object: 63cf14e7fcbc7c3969220b4f07109473b246bf49 + last_write_checksum: sha1:6a0988d4e52aa2e9f7b09ae1e3266ecf9639c22b + pristine_git_object: 1e04ce24d62db6667129b35eb28dabcfd4135ea8 src/mistralai/client/models/messageinputentry.py: id: c0a4b5179095 - last_write_checksum: sha1:b1b8f5b78eb5f57f5cfa7163ed49101736bcefaa - pristine_git_object: 15046d25130cda6571f07a456c2b5a67d2a3bcc0 + last_write_checksum: sha1:b5bad18b88c0bfbbddfdafa6dc50a09e40a6ebd7 + pristine_git_object: c948a13e3cc2071dd1b3d11c419ea61d51470152 src/mistralai/client/models/messageoutputcontentchunks.py: id: 2ed248515035 - last_write_checksum: sha1:df4ef4d17ce48df271ff2b8cab297ae305aa08ec - pristine_git_object: def7a4d27cd3d1479864a1d6af19e89bd57bff70 + last_write_checksum: sha1:dc7456e44084cba9cc6a46553fd64b1eb25f8d77 + pristine_git_object: bf455d17db16e4bc11da0ebb105a9f6ad4d63c01 src/mistralai/client/models/messageoutputentry.py: id: a07577d2268d - last_write_checksum: sha1:0633b8c619883bedb1a6ad732c5487c7e7f817f9 - pristine_git_object: 8752fc36bfec39e0ab79d4593ae0cb43ea00641c + last_write_checksum: sha1:38ad03422407925087835ab888c0be40bf5fa7fa + pristine_git_object: 6a9c52ed59af1497577be2538e7141d57eea4c8f src/mistralai/client/models/messageoutputevent.py: id: a2bbf63615c6 - last_write_checksum: sha1:bbdb2c840a7a196edcb6ac6170e8273cc47a495e - pristine_git_object: 39c1013939ea238cb1c7ccbc05480a6840400061 - src/mistralai/client/models/metricout.py: - id: 92d33621dda7 - last_write_checksum: sha1:6198ba9e2cd66fcf7f9fcc1cf89481edd432cf11 - pristine_git_object: 5705c71283ce7d4a01d60752657f39279c0f1f85 - src/mistralai/client/models/mistralerror.py: - id: 68ffd8394c2e - last_write_checksum: sha1:8b867eca5ca81aa6364f13c9d7e42f9b0d855724 - 
pristine_git_object: 862a6be8294db5b30bb06cb7b85d60c52ed8e8c9 + last_write_checksum: sha1:c3317ab9279c499dd7fb26f45799ca9369676ac7 + pristine_git_object: d765f4fd3c4e43c37063833368e4b21cc0bfbcf2 + src/mistralai/client/models/metric.py: + id: c6a65acdd1a2 + last_write_checksum: sha1:5ef7c75b278f16b412b42889ff0f2fc19d87cb7d + pristine_git_object: 1413f589f7f23991a12c1367bc6f287b5e07d4a4 src/mistralai/client/models/mistralpromptmode.py: id: 95abc4ec799a last_write_checksum: sha1:a1417b987bb34daeb73ca4e015c085814e6c8ad2 pristine_git_object: 9b91323e7545d636308064085ca16fc554eac904 src/mistralai/client/models/modelcapabilities.py: id: 64d8a422ea29 - last_write_checksum: sha1:5bc65733cf1c2f4ee8e1b422636fda754bdf8afe - pristine_git_object: c329efbcd9be212c7428c09f28f897834c9239d3 + last_write_checksum: sha1:0f733a45f06cb2c603b47134d999a2de4c0a7bb0 + pristine_git_object: d9293ccc163995cfe0419d05c90fe1ae8e75cf57 src/mistralai/client/models/modelconversation.py: id: fea0a651f888 - last_write_checksum: sha1:6186e845be2717da6116e20072835c050d3fdaa5 - pristine_git_object: c0bacb7fd9cd052ecb31a72c6bf593504034e069 + last_write_checksum: sha1:4c1b31d95351dea877e24bd452b32d8e22edf42e + pristine_git_object: bb33d2e0e047bc075cb7ae284958b80a5b5ee657 src/mistralai/client/models/modellist.py: id: 00693c7eec60 - last_write_checksum: sha1:89695c6a680da571c7a77c4544607bd83b3a93d5 - pristine_git_object: c122122c38a3331337cc702340cf1d3e0c9ef99d + last_write_checksum: sha1:de62fc6787f482e5df0ff0e70415f493f177b9a1 + pristine_git_object: 5fd835f24cd1098a153ebfb3e958038a183d28a7 src/mistralai/client/models/moderationobject.py: id: 132faad0549a - last_write_checksum: sha1:742d942d72b615432c066827b822290cf4d51d40 - pristine_git_object: 9aa4eb15d837ab2af97faa131a362d50a3a85482 + last_write_checksum: sha1:a8c1454a533e466216ef98dd198ae8959f51fa76 + pristine_git_object: e7ccd8f6f1f75704a973be7ebabc49617070c34a src/mistralai/client/models/moderationresponse.py: id: 06bab279cb31 last_write_checksum: 
sha1:b9158e575276c1e0a510c129347b9a98c5a70567 pristine_git_object: a8a8ec3d8d8a58deb3c1f8358c6dce5a9734f89c - src/mistralai/client/models/no_response_error.py: - id: 2849e0a482e2 - last_write_checksum: sha1:35b1651843a697024146d6377838b5b99c5c66d3 - pristine_git_object: 7705f1945567498ce606364490685a91b13cd8dd src/mistralai/client/models/ocrimageobject.py: id: 685faeb41a80 - last_write_checksum: sha1:663f11a19e067d424263eee40d8127cdc56fb72e - pristine_git_object: e95b67e17e51653bf194ad1cff3a926f34cf97c2 + last_write_checksum: sha1:13f4e4d33d8fb5b0ee842695d4cc8329bd7ca382 + pristine_git_object: 365f062b5674141aad4b1601a85bec7a56db4cff src/mistralai/client/models/ocrpagedimensions.py: id: 02f763afbc9f last_write_checksum: sha1:f572ed8992ba1ba4d53b705c4e8c94c85ae1290e pristine_git_object: 847205c6c74a621dd2ee6d9eb18d1acba8395c50 src/mistralai/client/models/ocrpageobject.py: id: 07a099f89487 - last_write_checksum: sha1:10cbd1dddcb1f1f5d530048130908ad0ce715928 - pristine_git_object: 4f4ccf43011fa2563f79bb70ae2a813b84f04074 + last_write_checksum: sha1:5089ac3f02e4225d6c95cc9f05b74013694536da + pristine_git_object: ffc7b3b615e17a8e0d76fea4081249b143d8fe4d src/mistralai/client/models/ocrrequest.py: id: 36f204c64074 - last_write_checksum: sha1:8e669292b846a5af4e3cee0b632524696e3067bc - pristine_git_object: 18b899dd5ecc162dc8e92622f56bed503fff80f7 + last_write_checksum: sha1:9e9009dace9ff36cbff0cb8de408a1e0585147a7 + pristine_git_object: 4ad337ced23b3bdad21785b8dc3fcadbb868d4f0 src/mistralai/client/models/ocrresponse.py: id: 2fdfc881ca56 - last_write_checksum: sha1:4a28dbfcc076c149e4f08a830d4d7f770836eb15 - pristine_git_object: 0a36e97500b4f62adac2526d7dd7cb85c9bdb8b8 + last_write_checksum: sha1:f1d18dbf4cd02f3598ae574d5033c30989fa6985 + pristine_git_object: e63eed987f4eb83f3406b15cf4d840fd43528a49 src/mistralai/client/models/ocrtableobject.py: id: d74dd0d2ddac - last_write_checksum: sha1:3116548673509f4e9f6a50d39f58ce3374823cc4 - pristine_git_object: 
e32ad894cd97546e635d12595051da103cde9fd8 + last_write_checksum: sha1:492f8e4c30b61330592768b13cffcf9a9eb2c0fa + pristine_git_object: 66bb050f30790c3fc51cdca1b73e847388fe50c5 src/mistralai/client/models/ocrusageinfo.py: id: 272b7e1785d5 - last_write_checksum: sha1:b8fb06d0dad22f958ac756e65d70f5ba410ad47a - pristine_git_object: a421d850450bb3f0b62853c931cd457434d2f174 + last_write_checksum: sha1:2b37766fdff72e7ec6e052f248362f7bb3989d2c + pristine_git_object: 2ec1322b29d7fe5246b9ad355a4997222b37970f src/mistralai/client/models/outputcontentchunks.py: id: 9ad9741f4975 - last_write_checksum: sha1:afb76f3af2952c2afab5397e348ddfd6dbb56c4f - pristine_git_object: 1a115fe8b4874a6bd86719d91332cd3db6d95b46 + last_write_checksum: sha1:16c43816ac7b7afd134bce1cda5bb44485d9fafe + pristine_git_object: fab7907b105cc9d9c738c5cca9c09eba9d5c4781 src/mistralai/client/models/paginationinfo.py: id: 48851e82d67e last_write_checksum: sha1:166961e2c0f573ba0677ee803820bb944a8a5efb pristine_git_object: 2b9dab6258249f7be87e1d4a73a2502e21fe1f0d src/mistralai/client/models/prediction.py: id: 1cc842a069a5 - last_write_checksum: sha1:ca391fc2f9faf1657392ceda952c2ee422121952 - pristine_git_object: 52f4adf1eb46d7d5679f9705871cd73e08ae8830 + last_write_checksum: sha1:3ee24375eb7f00cea0c9db6eebc564ce7067f295 + pristine_git_object: 0c6f4182ca8140e595f601b12fbd582034257587 src/mistralai/client/models/processingstatusout.py: id: 3df842c4140f last_write_checksum: sha1:007a476e4101cac4d2a9eef94d289f0f486d763a pristine_git_object: 3acadcc9792c286cd31031a80e108b74bc2c0c4e src/mistralai/client/models/realtimetranscriptionerror.py: id: 8c2267378f48 - last_write_checksum: sha1:b9642dd42c4092bdebe0a4f8d35c68152f259c05 - pristine_git_object: f8f2d3da9598ce0cd90d148ba1a9be0c5d6237cc + last_write_checksum: sha1:78637de61d6fc3bc1fff8e95c0a6f5ffc1a3e111 + pristine_git_object: c661e46100752119521f63045e8ebe79105ecc01 src/mistralai/client/models/realtimetranscriptionerrordetail.py: id: 5bd25cdf9c7a last_write_checksum: 
sha1:a226b10718b1fe4a661311cbd98ea3b1d1ac4163 pristine_git_object: cec1f6eabd44ceab4e58694a0862c9c90ea2f264 + src/mistralai/client/models/realtimetranscriptioninputaudioappend.py: + id: 8b03cde6e115 + last_write_checksum: sha1:abcf48a48b077e836e2ae5647d93bd61007b9aa7 + pristine_git_object: 8156a2704bd95b74875f7a9ac17191e026f08993 + src/mistralai/client/models/realtimetranscriptioninputaudioend.py: + id: c187ba1b551d + last_write_checksum: sha1:fa96156774481ca3b98f8c0f99b3b1db01280b37 + pristine_git_object: 473eedb744141faa3447929865a76129d5e96432 + src/mistralai/client/models/realtimetranscriptioninputaudioflush.py: + id: b27b600c310e + last_write_checksum: sha1:8a8eb7de4137cf8cd810d93d984009bf8dff51c4 + pristine_git_object: 553d14c7720b3d1388901989d8160f0e3318ba56 src/mistralai/client/models/realtimetranscriptionsession.py: id: 02517fa5411a - last_write_checksum: sha1:0073b248604f667e89e34cf01184a788ca84d63f - pristine_git_object: d20d0d8c94aeec425a2c1dfb93b72ac6878cb8af + last_write_checksum: sha1:eb9a23fb89e0bdb3bb6168f512488a98bd626bc1 + pristine_git_object: a74a457b1e54deb1fcd203ce5ff2c57691f16b18 src/mistralai/client/models/realtimetranscriptionsessioncreated.py: id: 4e3731f63a3c - last_write_checksum: sha1:d3fb5c5dc417a0ebb12a30770324674e055526ae - pristine_git_object: c4fa5774502699529e27870436ca65b9f88ccfe1 + last_write_checksum: sha1:6997848cf22dc90b10597eaf9f0dd966ace969af + pristine_git_object: bb96875ab913f3d6ff241a00d94a87e877637782 src/mistralai/client/models/realtimetranscriptionsessionupdated.py: id: 686dc4f2450f - last_write_checksum: sha1:7e4de1020672efc3503cda5b916b41056bf1d22b - pristine_git_object: a61fb05e8e5ba3ffa20bbb98bf61c17045c1f75c + last_write_checksum: sha1:e023fe0c8c54da644fc797c25dfeb070b6f0fd1c + pristine_git_object: fea5db4a1b956cb8253e4f147463c47958bfd989 + src/mistralai/client/models/realtimetranscriptionsessionupdatemessage.py: + id: 4e1b3fd7c5a3 + last_write_checksum: sha1:7da202e016b1d1dfc36a13ac03e3b419f0952cd2 + 
pristine_git_object: 07ad59a41f8a16b9c23c4e0be503a801ec0e2dd6 + src/mistralai/client/models/realtimetranscriptionsessionupdatepayload.py: + id: 7033fdb33ad4 + last_write_checksum: sha1:812f072a9effe1ce44e56094121ed10b3a83e39d + pristine_git_object: a89441e91dff4b7a78e8dd247b43243e89bf129d src/mistralai/client/models/referencechunk.py: id: 921acd3a224a - last_write_checksum: sha1:0dcff62499afdb1db0fd4f46614f8680f94837f4 - pristine_git_object: 7634d8ae07c96a99e634dcf888077f1d8cc4dc67 - src/mistralai/client/models/reprocessdocumentop.py: - id: b2913a7aa5c9 - last_write_checksum: sha1:07174ee58ec12909f08a08a9a6d7427ee9b2d5d0 - pristine_git_object: 48a4b72bf285e2f2e4b2d0c352ebc463518ed712 + last_write_checksum: sha1:a8bff06a2a040556bce8e6212973a774bee6bd34 + pristine_git_object: e0bbae4e08275e82010080d4ee84612e01a07f81 src/mistralai/client/models/requestsource.py: id: 3f2774d9e609 last_write_checksum: sha1:1ce68530a46793968f1122d29df722f0a5c9d267 pristine_git_object: fc4433cb4e657b06aa6a4c078094c2df342810e2 src/mistralai/client/models/responsedoneevent.py: id: cf8a686bf82c - last_write_checksum: sha1:376c2a65f1fcdfe20d7cf0bd6aa6d8870a4f32c1 - pristine_git_object: ed331ff12c8728290b8ad17e52d9384265233665 + last_write_checksum: sha1:144a8bf407391948946f3f5362db78a33c45ee6c + pristine_git_object: be38fba81c08088303c4342c99ac3628c5957785 src/mistralai/client/models/responseerrorevent.py: id: b286d74e8724 - last_write_checksum: sha1:ecff834ec62bf46d2aa5d9753f3898ed86caad45 - pristine_git_object: 8f196a52b469458da5c9f072983870da8c4fc4ea + last_write_checksum: sha1:df3f53344624082471c795131552689510946345 + pristine_git_object: fa4d0d01c1cb7f15d6f469279c2000d2fad8f459 src/mistralai/client/models/responseformat.py: id: 6ab8bc8d22c0 - last_write_checksum: sha1:e0c29239b4cd698af50412a1cab85217ccbb1796 - pristine_git_object: 409b80d658e4c93f4ee25c218fe74d65fd84ad44 + last_write_checksum: sha1:0ab455566c6bb0b63e2cb1f61f300266021f5954 + pristine_git_object: 
b2971412549cc5b95c53b93425dbd5b6503a4df7 src/mistralai/client/models/responseformats.py: id: c4462a05fb08 last_write_checksum: sha1:3cb82d44a4f9df5e9a3f51867be6eab1d439d87a pristine_git_object: 21345778ad2d41a3746292e67fec628f9ec2a84d src/mistralai/client/models/responsestartedevent.py: id: 24f54ee8b0f2 - last_write_checksum: sha1:8be1513409934d7ea1c524e468954f7eda0a8c62 - pristine_git_object: 256d2a6c864edf4f3ccd77b2db139c11fe4f6727 - src/mistralai/client/models/responsevalidationerror.py: - id: c244a88981e0 - last_write_checksum: sha1:74a39321dee69f3487d9b9e01ffb2e40715176f4 - pristine_git_object: 1ed0d55266a106364fe58aa1e476fafbfbbbfdfe - src/mistralai/client/models/restartconversationop.py: - id: 2f6f3e4bbfd8 - last_write_checksum: sha1:9500d3ebea040ff4a203f3f025ff1bff8a397235 - pristine_git_object: b09eaed5bc8ecdbb7f1952c97b2e665462c70f9e - src/mistralai/client/models/restartconversationstreamop.py: - id: 16dc9ee5bf22 - last_write_checksum: sha1:b16f54529f4fd7d1422c82ff1a6dd5a9a82ba6bd - pristine_git_object: 3b2025f536d1c54ed58064b4be33aaafb9297ac4 - src/mistralai/client/models/retrievefileop.py: - id: ee73efdf9180 - last_write_checksum: sha1:330ec0a78a7ba623f21cd378b53250045bea984c - pristine_git_object: edd50e571cf56c6c22acc1777f6c9af38787f07d - src/mistralai/client/models/retrievefileout.py: - id: 8bb5859aa0d0 - last_write_checksum: sha1:1077bdb8fcc5ba22b2deb7f5c95fefe7b1fb82f5 - pristine_git_object: 2abf2161cd61d84f04836740a526c0e3711c3f6d - src/mistralai/client/models/retrievemodelop.py: - id: d883baa79c9e - last_write_checksum: sha1:525c7e9cf8594433cbb21374422067a75e6b53a9 - pristine_git_object: b4334e9a5541a14f7916244761213b883d507a41 + last_write_checksum: sha1:f66a0a67444916e838ca9a63144fb661832b54b9 + pristine_git_object: 84abfcd9ac159b9bd9234ff015d5525d88d663f6 + src/mistralai/client/models/retrieve_model_v1_models_model_id_getop.py: + id: 6fefa90ca351 + last_write_checksum: sha1:52775e73fb5c51d245362ce63672cec776e5b6bd + pristine_git_object: 
cd5955c1eadb8cd9d1f9ecc388c2cc17df11c885 src/mistralai/client/models/sampletype.py: id: a9309422fed7 last_write_checksum: sha1:86a61340a647696f6c35a82d945509b1c85aa6f7 pristine_git_object: dfec7cce1e22ab607b6a9e947fa940284426086d - src/mistralai/client/models/sdkerror.py: - id: 12f991dad510 - last_write_checksum: sha1:c2c344c8b7e23b0c93eeafedd25d28582467c3a7 - pristine_git_object: 101e1e6a67c3672e899b39dbfe10d45550a4449a src/mistralai/client/models/security.py: id: c2ca0e2a36b7 - last_write_checksum: sha1:cec2a544790c2178f92742ac88e546efeacedb40 - pristine_git_object: 4fa8b4b2651f1d13811faf2da6e481243ea84e5a + last_write_checksum: sha1:d74333517caae2a1aa58517e8e935e46913bcc66 + pristine_git_object: f3b3423e850a1afa3b0fa5fa6c94f6018ff70627 src/mistralai/client/models/shareenum.py: id: a0e2a7a16bf8 last_write_checksum: sha1:15a84d57ceeb74cfb37275f714954e42d8e9b3ba pristine_git_object: 08ffeb7e46fbbc28b7c93ef2aa4a49aff7c0d35e src/mistralai/client/models/sharingdelete.py: id: f5ecce372e06 - last_write_checksum: sha1:c5e4e6df47ef2d5715a99533a1efd936f0e7e16e - pristine_git_object: 202732cf785074446cd24360dd9c540768e4134f + last_write_checksum: sha1:247d793bd1ddc0ad35d010c17e5b32eba826e3a1 + pristine_git_object: 33ccd7e71b8f65d2a9329d8632b5446ca0431d0a src/mistralai/client/models/sharingin.py: id: e953dda09c02 - last_write_checksum: sha1:f60bd60d37f0accadf50ea111055fd99aa190a5f - pristine_git_object: 8cc3e8968d9d5460f040ebdb66d8f460e86d2c96 + last_write_checksum: sha1:7c2b5333c634ed7889fc907edbf89c6066db5928 + pristine_git_object: 7c1a52b049db4afbd6a06b5f39966dbec4f862ba src/mistralai/client/models/sharingout.py: id: 0b8804effb5c - last_write_checksum: sha1:362bda8a5bd70d12e2de33814d3bd36a61c6d7ae - pristine_git_object: 778071546c12c2636d2deec6042e6b686b6428c6 + last_write_checksum: sha1:a78e4f6bf2f49ae8250787e1680b5004563b32ac + pristine_git_object: ab3679a4cbcc2826ff2672a09e4eaf4990b5c6a9 src/mistralai/client/models/source.py: id: fcee60a4ea0d last_write_checksum: 
sha1:4d4277d75f7ce001780a069898b38afa7c8addc0 @@ -2632,214 +2608,190 @@ trackedFiles: id: 1733e4765106 last_write_checksum: sha1:3c79fc7c43cd018fba4950ba013ed15899b82ebf pristine_git_object: 0add960bc93f53df5ddda94892543a0857f32dd6 - src/mistralai/client/models/startfinetuningjobop.py: - id: "663886392468" - last_write_checksum: sha1:6a6a409dd481359e8d6593fa2ea817007f8a967d - pristine_git_object: 805a8721cc7d048f172e1096ead0e410c7d04928 src/mistralai/client/models/systemmessage.py: id: 500ef6e85ba1 - last_write_checksum: sha1:af68936119bf7c067aec5215e2654c56a5df1755 - pristine_git_object: 352eca76ad5051cc2c504c673a23e048642fe018 + last_write_checksum: sha1:a88de3fc70adab47943f867336659b3a1a6cdae0 + pristine_git_object: 2602cd2db03cd129b42b343f2dc79ce68106ac35 src/mistralai/client/models/systemmessagecontentchunks.py: id: 297e8905d5af last_write_checksum: sha1:e5695ca0ebdb0f02f3a0c527015df154a0c52b7f pristine_git_object: d480a219e935aaea91adc320de0003b562c0bbb5 src/mistralai/client/models/textchunk.py: id: 9c96fb86a9ab - last_write_checksum: sha1:4ad624afaf4d83d4e58f72bcbd15b9faecc703f3 - pristine_git_object: c0584234da572bb903894633b123b1dda29e7736 + last_write_checksum: sha1:89cbb66753d7a3585ce58c70219a349f770909cc + pristine_git_object: ac9f3137dddc15e1cd10aa6385b76510e6c23e33 src/mistralai/client/models/thinkchunk.py: id: 294bfce193a4 - last_write_checksum: sha1:d9c779959ed82ae3de66e481536d80bcc2ed57a5 - pristine_git_object: a999f5d7b824325085ec980cfa07294919408538 + last_write_checksum: sha1:9126c530e93ae7532235d4bfa3e2b202423a0f24 + pristine_git_object: 5995e6010bfb63d0ab2ded6e0f55b7dca23f769a src/mistralai/client/models/timestampgranularity.py: id: 68ddf8d702ea last_write_checksum: sha1:64e7b198a75f026590e26758112651d31984076f pristine_git_object: 8d3773752444db865c0e2629ad9eed66eb7f2bc6 src/mistralai/client/models/tool.py: id: 48b4f6f50fe9 - last_write_checksum: sha1:14e7b21a2857e2ca36830730a47d0eca476fb491 - pristine_git_object: 
a46d31f166618fd5b92b7e76ccb9190796af7cd2 + last_write_checksum: sha1:7e33d7a0349e652b40926f6a51240b9a5c1a7dbd + pristine_git_object: 2b9965e571eeb494f8cf867818aab488198ecdb2 src/mistralai/client/models/toolcall.py: id: fb34a1a3f3c2 - last_write_checksum: sha1:15ed0a4611e8c310640ec4622af8019d2db93355 - pristine_git_object: 4a05bbd04a44446babda8419dcf4d4c93248fe41 + last_write_checksum: sha1:7d0275444dd6be291c091e908a2b7f2fc536f20f + pristine_git_object: 181cec33c904535c804de06c7357bd493647cd70 + src/mistralai/client/models/toolcallconfirmation.py: + id: f2e953cfb4fe + last_write_checksum: sha1:554a2e073917ffb479efe5887c0b59a2f4967c6e + pristine_git_object: fd6eca50a7ec2f4cca2ae20958717881660e0ac5 src/mistralai/client/models/toolchoice.py: id: 14f7e4cc35b6 - last_write_checksum: sha1:358a6e88486b4d372c9041dd15c0206b119bbc32 - pristine_git_object: aa2016fb63290c63f9b8f3e18c552f6598f15c8f + last_write_checksum: sha1:a787827a4f4ecf5b6a7068ba94fd1ff074898b51 + pristine_git_object: cb787df1b62190319c6e9679521228af28ee7204 src/mistralai/client/models/toolchoiceenum.py: id: c7798801f860 last_write_checksum: sha1:5388b2a6fad842f8e4ae79e6257b4d14c122a6ff pristine_git_object: d66c3d07058eb87bcc3eec10de99a616b5f6638a + src/mistralai/client/models/toolconfiguration.py: + id: faec24b75066 + last_write_checksum: sha1:912c1c10e88053ae4ee44af763c9ab7c95339f5d + pristine_git_object: b903c8b6c13777b671faf5aa97994117734b3a8f src/mistralai/client/models/toolexecutiondeltaevent.py: id: df8f17cf3e07 - last_write_checksum: sha1:6ad6e219f3d7512c9fd382fb22471bfaa0fc9b09 - pristine_git_object: 384ec2407848f51434ca378ad7de965c584b163b + last_write_checksum: sha1:2537a6e2dffde3760a064fdf92efa6cdc117ba2b + pristine_git_object: 5a977ca6fc5bfdeadd929f18037fb5c9a9582b40 src/mistralai/client/models/toolexecutiondoneevent.py: id: 514fdee7d99f - last_write_checksum: sha1:09ef4842c50419eda746f3361454c4df0c3c2466 - pristine_git_object: 56f28899b8b4161fcddfec0ed2610486fe6f8b06 + last_write_checksum: 
sha1:d62f57105e4816e03030bc9a2a5645482ea80c55 + pristine_git_object: 1c9b0ec92d87a8559ef050a21ba309e05f6b0314 src/mistralai/client/models/toolexecutionentry.py: id: 76db69eebe41 - last_write_checksum: sha1:ff84f62c5264aa023f412956cf83604ecc4112a9 - pristine_git_object: 158cbf06a2acdd492ddb91ae8eaca4802da9f359 + last_write_checksum: sha1:9a697fdad4178b95d7d1bd1eaee77ef948fb2d4f + pristine_git_object: 0d6f2a1305f262519ba719969c6e62ceb95e52b3 src/mistralai/client/models/toolexecutionstartedevent.py: id: 40fadb8e49a1 - last_write_checksum: sha1:5ba46ca1583e8245736a0ae81372025482a8504b - pristine_git_object: 1591866981ce1439fbce3736f028b15205d95810 + last_write_checksum: sha1:9f6e43d5b2c807ca3b080ea7bd4878ba3ec2a788 + pristine_git_object: 21e5bfa8fea7fa27b7031b740f72a873760700cc src/mistralai/client/models/toolfilechunk.py: id: 26c8aadf416a - last_write_checksum: sha1:1dd468876a2ff5ec8b15b6f4e6b8f812e640a29a - pristine_git_object: 6eebd562b1707b41b81e2fd0e267e4c8698551de + last_write_checksum: sha1:89bb203aa600bf6a516fbe10e1787a132de9ca5a + pristine_git_object: 0708b3ff4c4f97a0e4c4359baeedc89ef0b10278 src/mistralai/client/models/toolmessage.py: id: 15f1af161031 - last_write_checksum: sha1:809936ebaeb4541f862aed6d26e1d1f5ff0ae58a - pristine_git_object: b3e8ffd9294bf6b0b46b26097abb87a5b96c9302 + last_write_checksum: sha1:cfa16352cf5bbcd6eedbfbf7f3002149fd989418 + pristine_git_object: 05a0ee636a4393e3ce65cc1b6e272ddf8ec79254 src/mistralai/client/models/toolreferencechunk.py: id: 822e9f3e70de - last_write_checksum: sha1:f02c38c892580a6287156551e7964c601a239220 - pristine_git_object: 3c76c8c2dcc86d225c5218fa13cd43a693230fa8 + last_write_checksum: sha1:f5c9265e27fa2d4526e5ce50dff7f7bd641eb642 + pristine_git_object: 95454fe891dd3955121565431897c1b8f0c25083 src/mistralai/client/models/tooltypes.py: id: 86c3b54272fd last_write_checksum: sha1:e90c15c1e645a5f207af0c7ac728cb0a521c6706 pristine_git_object: e601c1967c42ef8d0c2eea98bc5c0ca722cde066 
src/mistralai/client/models/trainingfile.py: id: 2edf9bce227d - last_write_checksum: sha1:668f05a3e3b883c2f54b1e541f1fb501605456b0 - pristine_git_object: 1f710ff81c046261ea497505d7216a1208c65d5b + last_write_checksum: sha1:8fd6a2560554b3c2166daff2ff1a48bb49053489 + pristine_git_object: 2faeda8bfb38c810c5d80eb17cc9928c49c7caf5 src/mistralai/client/models/transcriptionresponse.py: id: 60896dbc6345 - last_write_checksum: sha1:3e5c20911697f5569029932fe0910da94feb2b06 - pristine_git_object: 786863ec331a4bdca18ac056f7447d11010d4320 + last_write_checksum: sha1:e8a318798dfe4ebd64c9d64f487f7e3e8dd05532 + pristine_git_object: 70315463ff8e01c680aa80d68bdc32a7429ddb16 src/mistralai/client/models/transcriptionsegmentchunk.py: id: d1e6f3bdc74b - last_write_checksum: sha1:0107b6ee9160cd2a8309f7c8465502d7d0be90a8 - pristine_git_object: c78bec3068b95782bdc271c2e1ee645b115fed32 + last_write_checksum: sha1:ee56c437444cbfa7983ba950e3e166f392d208cb + pristine_git_object: b87bfc2f9de0a07d62e8cc1fe265a9c29f56f194 src/mistralai/client/models/transcriptionstreamdone.py: id: 066a9158ed09 - last_write_checksum: sha1:3a6abc6f1a0ad78d73e32f3d40ef4bb425aee5b5 - pristine_git_object: b5740b3bb62b4db3846b7727b15e18502e39d862 + last_write_checksum: sha1:cb8ea2e34c712ef1694bd1b6a83e7eed9318b13b + pristine_git_object: e3c5016901a2400c222e5b821b5afb312af1a1e6 src/mistralai/client/models/transcriptionstreamevents.py: id: b50b3d74f16f - last_write_checksum: sha1:f688a18317bd048ad89881c35cb80e39bb7cba47 - pristine_git_object: 17161a177721e44a40903cf15bf08ad0b56545de + last_write_checksum: sha1:68f82eea8a0bcf1b8b65cedf9e276f34121d398b + pristine_git_object: 073fd99aebf6f90027a45c8ee4daa7ffeb8ee34e src/mistralai/client/models/transcriptionstreameventtypes.py: id: 6f71f6fbf4c5 last_write_checksum: sha1:1d568460b1521f17dd5e551632ae4d7883a98dd3 pristine_git_object: c74bbb7483cc3981ee3638c80c15924f3e1c20c4 src/mistralai/client/models/transcriptionstreamlanguage.py: id: e94333e4bc27 - last_write_checksum: 
sha1:17c7b082ebf5764e21f124fe4c6a6ee5cea4fc51 - pristine_git_object: 67b3e9791efaf134580d82c2a12fab1cd33efbb1 + last_write_checksum: sha1:d1ee93b09ca377bc29845924d53db3ccf250269d + pristine_git_object: b6c6190684eccdc3fe6ce4bc7b86f5ee6490a197 src/mistralai/client/models/transcriptionstreamsegmentdelta.py: id: c0a882ce57e5 - last_write_checksum: sha1:12cbfcf02d5cb4979a836e429690786153250bf0 - pristine_git_object: 8db5e73619eab98c3751689a7ec5bef45ef9ef6b + last_write_checksum: sha1:3507a0355027136e92ada0c9766277381d5dee96 + pristine_git_object: 32ef8f9b2aa34253ea10c830ae856a931306f658 src/mistralai/client/models/transcriptionstreamtextdelta.py: id: 6086dc081147 - last_write_checksum: sha1:6b371b5d236e6e767f25160ab0e8a49bcaf356f8 - pristine_git_object: 49338a083332467e64f171637ca04365ca6bf25b - src/mistralai/client/models/unarchiveftmodelout.py: - id: 9dbc3bfb71ed - last_write_checksum: sha1:40a23dc39af81f06b23f21dad45c5c5f1178b2af - pristine_git_object: 0249a69e8552ed00a5e1f505fdc16025c46d2477 - src/mistralai/client/models/unarchivemodelop.py: - id: eb18584fd78c - last_write_checksum: sha1:5b81357950f301a82233b58a3e2a5b232fdbf546 - pristine_git_object: 1d68a06ae41559baffb6d54398b52dec630556c7 - src/mistralai/client/models/updateagentop.py: - id: ae3a6abea468 - last_write_checksum: sha1:3867948bd0ea37b597c4e5ef7a2e6881791a5fa5 - pristine_git_object: 28acc83d8df1373e897f9634dfbb84ee28897717 - src/mistralai/client/models/updateagentversionop.py: - id: 3821dca5b20a - last_write_checksum: sha1:4c41a450278858089c7cb23b8fcf1e4184fa1f1d - pristine_git_object: 114013bcdcfb7d7c9e935285f167a004b65fbd09 - src/mistralai/client/models/updatedocumentop.py: - id: eee9ef317180 - last_write_checksum: sha1:7d9fc6e18e6631cfe9cd1bc2fa5f7d6cd599ec19 - pristine_git_object: 073f22a9a28c18ad645212262bdc66528a1f6281 - src/mistralai/client/models/updateftmodelin.py: - id: 39e2d678e651 - last_write_checksum: sha1:4ea30ed8eaad36e1440614016f075f088c7e5781 - pristine_git_object: 
4ac5a8a24026f6a975044de01a9918364aa64e04 - src/mistralai/client/models/updatelibraryop.py: - id: 4ba7acdb62c6 - last_write_checksum: sha1:3816c8eff226634b545843eed2d0c15fa1579308 - pristine_git_object: c5a1ad30e9bfc277cbbcdea0218a265ad10bcb96 - src/mistralai/client/models/updatemodelop.py: - id: ba149ecfe03e - last_write_checksum: sha1:2ce33ac60846a5ef70141dccbdb09950c3d1e348 - pristine_git_object: 023be97905929aa2925f20cd69b3591e6e3168d7 - src/mistralai/client/models/updateorcreatelibraryaccessop.py: - id: ec9b15418f5c - last_write_checksum: sha1:82fe6bbbb1402f03b7c0380c5fd84a8fef9bec67 - pristine_git_object: 1abe6eda3eb7d0aff8a7c146c848a63e299cedf1 - src/mistralai/client/models/uploaddocumentop.py: - id: 0018fe7ff48c - last_write_checksum: sha1:f31d565f419cbcc59af0655753cee6c643ad307a - pristine_git_object: 2c957947830ae0d467084cc6502d9d97ffdf6c81 - src/mistralai/client/models/uploadfileop.py: - id: d67619670938 - last_write_checksum: sha1:00664ba8af70ffc96871eee02890411475ca6c37 - pristine_git_object: 50848f0b663f60f9a303010f3c940919939949c9 - src/mistralai/client/models/uploadfileout.py: - id: 42466f2bebfb - last_write_checksum: sha1:44d0e5d419fb82c56c33c0f9af8902b3cc06bf6d - pristine_git_object: be291efb523965c155fc922d896da2cf682378ab + last_write_checksum: sha1:968b4bc32731be6c63be3fd90eb26f4357f891a3 + pristine_git_object: 42f0ffb7f16bee4f68f9db9807aa4ec3d9ae5176 + src/mistralai/client/models/unarchivemodelresponse.py: + id: 22e2ccbb0c80 + last_write_checksum: sha1:a69d8dc8636f3326eb61892b85a9b60044b457fe + pristine_git_object: 5c75d30edaade853f085533da0f9f5de221b6e44 + src/mistralai/client/models/updateagentrequest.py: + id: 914b4b2be67a + last_write_checksum: sha1:f37178288254e905ce298befbe801fa6ba63ec0e + pristine_git_object: b751ff74396ca0e74411a7a1549c6e0b4988fc49 + src/mistralai/client/models/updatedocumentrequest.py: + id: a8cfda07d337 + last_write_checksum: sha1:c644725ae379f22550d00b42baefb511d1cc3667 + pristine_git_object: 
61e696555c0654208b0d9dcd63fc475ad85297d4 + src/mistralai/client/models/updatelibraryrequest.py: + id: 51bc63885337 + last_write_checksum: sha1:622d6a7af58d2e86d7d2dd4e312883d11ce5a8a8 + pristine_git_object: 91cbf2a1c76361c9c5ee1554c80f1507ff5ee50b + src/mistralai/client/models/updatemodelrequest.py: + id: fe649967751e + last_write_checksum: sha1:dbba8a6ccbfae36ac56808742f4c05ab99dd2c6c + pristine_git_object: f685cfcce1aa3669159fec902ba78034ef3141b8 src/mistralai/client/models/usageinfo.py: id: 54adb9a3af16 - last_write_checksum: sha1:fcfdc921bbcc78436ef156dd7a2eff1123c4036f - pristine_git_object: e78f92e75f86fd593469f7267aad72e417178161 + last_write_checksum: sha1:04705526057c43495284fe9c50cf7df2af7b49fd + pristine_git_object: 31cbf07e3e38df4452da320e44f3fa9aef17c196 src/mistralai/client/models/usermessage.py: id: cb583483acf4 - last_write_checksum: sha1:215406ca76602e899620ef763e216d71f8cd9fcd - pristine_git_object: 25ccdf805e9fbc65da7b6d0051f13224cf0e04fa + last_write_checksum: sha1:0060ee5f5fbbd78073cd56546127a021354a8072 + pristine_git_object: 63e7679246a11fe8e7a3db06e382779c05c64366 src/mistralai/client/models/validationerror.py: id: 15df3c7368ab last_write_checksum: sha1:63df5739d68f984470d4d1b8661a875201cc301d pristine_git_object: 385714c8cb80a8afbca6d5142a2d378d0d165cf9 src/mistralai/client/models/wandbintegration.py: id: 4823c1e80942 - last_write_checksum: sha1:6391a293368ba6fa98114ce510a7665b47d82721 - pristine_git_object: c5db4a6d409f1d84d356a471995119a070db627a - src/mistralai/client/models/wandbintegrationout.py: - id: 6b103d74195c - last_write_checksum: sha1:37caaf5224b216826c48912538959baa0a7d997a - pristine_git_object: d0a09bf48c3a24f5382a626d26897afe2d680f7e + last_write_checksum: sha1:cc0a7ce49756928f4d261375526a3498b9e4f05d + pristine_git_object: f0df2c77845b2741802730fcd4f3c5d31b7ddd8e + src/mistralai/client/models/wandbintegrationresult.py: + id: 8787b4ad5458 + last_write_checksum: sha1:6ba506e01333a3084f63fbfccb459235b6560554 + 
pristine_git_object: 575cbd42297f02a54542c7eda3a4cabaa28dda23 src/mistralai/client/models/websearchpremiumtool.py: id: bfe88af887e3 - last_write_checksum: sha1:9f9b4bfeea780cec16b9457ee800524c3eba7a4b - pristine_git_object: 9588ab1d7361d3ab1cba2f16e74695273cc03557 + last_write_checksum: sha1:ceb073d3b3916b2ff8f7b7e5eb01692893024d68 + pristine_git_object: 00d4a4b427331660d29513ec43e68fc7cf8afcfb src/mistralai/client/models/websearchtool.py: id: 26b0903423e5 - last_write_checksum: sha1:9afaf3738be10d0a401b34e15db25612ee33465f - pristine_git_object: 27502909ea608f8e0b4a71484da94d26209e0c07 + last_write_checksum: sha1:a07d7ace2d68c944c686e69053bef8d84231814b + pristine_git_object: 6871080f6279ef42a0525c1e26368baafc98fbb7 src/mistralai/client/models_.py: id: 1d277958a843 - last_write_checksum: sha1:f50e7b7194f97de4abf0afd70b5e1c52b805cef6 - pristine_git_object: 05b33ac72da14401b700c4abfb28ca33b5af702b + last_write_checksum: sha1:b9ea906a7704aa57efe5d13ac547e502d961d3b5 + pristine_git_object: a287c413ddf48bd5ff7fc0a685e05d4bcdabb6e5 src/mistralai/client/ocr.py: id: 2f804a12fc62 - last_write_checksum: sha1:2cfde7a27733502b87690c1025adefe5b717da57 - pristine_git_object: 2aa382295a9f1561021a36f3a68a9fb505cfe536 + last_write_checksum: sha1:707d91582149e76a3109df8b1a58bfd44111a93d + pristine_git_object: a46119d1577036be57896a7ea3737ab508497e4f src/mistralai/client/py.typed: id: d95cd1565e33 last_write_checksum: sha1:8efc425ffe830805ffcc0f3055871bdcdc542c60 pristine_git_object: 3e38f1a929f7d6b1d6de74604aa87e3d8f010544 src/mistralai/client/sdk.py: id: 48edbcb38d7e - last_write_checksum: sha1:be11dc3f70c773dd5c6baba6b3fafd996c5baec2 - pristine_git_object: b1ab54935a3421008c78f4864bd6097c0a098040 + last_write_checksum: sha1:365709e35dc4e450a2c4931e75dcbd04568ab361 + pristine_git_object: 80bf25a749eb3b36035aaafa15f059bcf403ec80 src/mistralai/client/sdkconfiguration.py: id: b7dd68a0235e last_write_checksum: sha1:c6944f12c6fdc992d43db943b24c8c90854cde5e pristine_git_object: 
712e92e05c7fd3016431ec62ecb7b7789c8b7071 src/mistralai/client/transcriptions.py: id: 75b45780c978 - last_write_checksum: sha1:b47a3765f2191715fc19bdbc4e56414abbe59f4b - pristine_git_object: f7ef5b0a0769467bd4bea61f7b0dca3b68c3788d + last_write_checksum: sha1:27a5b7dd6ed47b0f79b95fbb8599d439512ef344 + pristine_git_object: 7f01917d6e462cff9af75e70d32afbcc5958c7de src/mistralai/client/types/__init__.py: id: 000b943f821c last_write_checksum: sha1:12a4ace69cbc63f1125eeddf901afed7cdf378b0 @@ -2850,8 +2802,8 @@ trackedFiles: pristine_git_object: 4e889aa0ffbb4402e416a40fa6259334cb0a3c5c src/mistralai/client/utils/__init__.py: id: b69505f4b269 - last_write_checksum: sha1:adb457b85659a04945857a74407306dafbdce7cb - pristine_git_object: 7ed3a42095b5921adf0e154ae6eba560a1098233 + last_write_checksum: sha1:98698da73839db7c258fd1afd45ccacff86c64be + pristine_git_object: 4bde281a1fd8c616d4b3529af0fcb79f57374310 src/mistralai/client/utils/annotations.py: id: 1ffdedfc66a2 last_write_checksum: sha1:f86ba37de752e63076f25d53f9c54fce98d2a0bd @@ -2860,18 +2812,22 @@ trackedFiles: id: c40066d868c9 last_write_checksum: sha1:412ca432d6f5a75b692a967bc6fc52e4f4eff7d5 pristine_git_object: a2c94fac73ecbfb8acd8ed4f75692318e4f863ec + src/mistralai/client/utils/dynamic_imports.py: + id: ac9918d925c0 + last_write_checksum: sha1:93d3eac90a47a039e7a652ae120bec66be6c681a + pristine_git_object: 969f2fc71178ed2114640c8f0831f4f3acb25af8 src/mistralai/client/utils/enums.py: id: a0735873b5ac last_write_checksum: sha1:fe05b6a21360b0eff1fc246e9a3ee01758521262 pristine_git_object: d897495f053459106144501c67f2215251d52a27 src/mistralai/client/utils/eventstreaming.py: id: 3263d7502030 - last_write_checksum: sha1:0e15051d74262fbe051e1ba83fd1f2c0c0a016a0 - pristine_git_object: 3fe3c7e13509d6fab08fbb8504c6c5f674c2b679 + last_write_checksum: sha1:24af3168dafe6b8d860cffb121fac11cd0e9d930 + pristine_git_object: 19a121529f180968f655baffbe446e5c1d6c2abb src/mistralai/client/utils/forms.py: id: 58842e905fce - 
last_write_checksum: sha1:c7929d974f46629b56e740456ddf03230b4048ab - pristine_git_object: 2b474b9a719e95c4bcae8572e5569e64f8d0b77f + last_write_checksum: sha1:d68ca0257e0e8bdc5cdc450f3e70a7ba789859f5 + pristine_git_object: 6facec5386675ccd5a26ff6093f98436a62fdf6b src/mistralai/client/utils/headers.py: id: 9066de2ead8b last_write_checksum: sha1:bcd2f47b96bfaa54b3590c557a9267142d446be6 @@ -2894,20 +2850,24 @@ trackedFiles: pristine_git_object: 3aae69c7cf618776daec8bd46f9116b06c25e837 src/mistralai/client/utils/retries.py: id: 5f1a5b90423c - last_write_checksum: sha1:94a86f31092553d4640a54c446cfe9028b4fb6ef - pristine_git_object: 90c008b0e20c1a539d65ffb387fb61a724c3c111 + last_write_checksum: sha1:bbf8e376c1c801911e65e33566d3a142f46133f9 + pristine_git_object: bea1304150e77ca06185efb7db7798aaacd5e623 src/mistralai/client/utils/security.py: id: 1acb7c006265 - last_write_checksum: sha1:e8543609e699dab330a4768786883c6ca38f07a6 - pristine_git_object: 4c73806d9c8e54a2a4cfe8f62d8c281177789f6f + last_write_checksum: sha1:3981f6571daf28b3b553beb09a4ebeeeb6ceff14 + pristine_git_object: d8b9d8fe746babd0a87846812b1f4117d1a46de2 src/mistralai/client/utils/serializers.py: id: 53c57c7f29a8 last_write_checksum: sha1:8a3a15cf273034261111f2559cacbb579e17cb1b pristine_git_object: fbc2772dc4284775be92de6a086c1eade9376417 + src/mistralai/client/utils/unions.py: + id: d23713342634 + last_write_checksum: sha1:f814d757474f039199f501aa53cdfba97a8c6645 + pristine_git_object: 14ef1bd5c5abef9bd5f2a3a4ee2f79e954c67e7e src/mistralai/client/utils/unmarshal_json_response.py: id: b13585fc5626 - last_write_checksum: sha1:c0c44d0a656477daa225724e88a7cf5c954a1df6 - pristine_git_object: 65190e5c1d70a31f51656e1644bb701b9f132bcd + last_write_checksum: sha1:372a01f5abf034ddbe5d4a3fc68e9e397f86085a + pristine_git_object: 624433c4dd42c9fb1bfae363becc76c62e390e14 src/mistralai/client/utils/url.py: id: 3c6496c17510 last_write_checksum: sha1:c64be472d29cf229f2b91102808dcb741371c227 @@ -3024,7 +2984,7 @@ 
examples: path: conversation_id: "" requestBody: - application/json: {"inputs": [], "stream": false, "store": true, "handoff_execution": "server", "completion_args": {"response_format": {"type": "text"}}} + application/json: {"stream": false, "store": true, "handoff_execution": "server", "completion_args": {"response_format": {"type": "text"}}} responses: "200": application/json: {"object": "conversation.response", "conversation_id": "", "outputs": [], "usage": {"prompt_tokens": 0, "completion_tokens": 0, "total_tokens": 0}} @@ -3056,7 +3016,7 @@ examples: path: conversation_id: "" requestBody: - application/json: {"inputs": "", "stream": false, "store": true, "handoff_execution": "server", "completion_args": {"response_format": {"type": "text"}}, "from_entry_id": ""} + application/json: {"stream": false, "store": true, "handoff_execution": "server", "completion_args": {"response_format": {"type": "text"}}, "from_entry_id": ""} responses: "200": application/json: {"object": "conversation.response", "conversation_id": "", "outputs": [], "usage": {"prompt_tokens": 0, "completion_tokens": 0, "total_tokens": 0}} @@ -3075,7 +3035,7 @@ examples: path: conversation_id: "" requestBody: - application/json: {"inputs": "", "stream": true, "store": true, "handoff_execution": "server", "completion_args": {"response_format": {"type": "text"}}} + application/json: {"stream": true, "store": true, "handoff_execution": "server", "completion_args": {"response_format": {"type": "text"}}} responses: "422": application/json: {} @@ -3085,7 +3045,7 @@ examples: path: conversation_id: "" requestBody: - application/json: {"inputs": [{"object": "entry", "type": "message.input", "role": "assistant", "content": "", "prefix": false}], "stream": true, "store": true, "handoff_execution": "server", "completion_args": {"response_format": {"type": "text"}}, "from_entry_id": ""} + application/json: {"stream": true, "store": true, "handoff_execution": "server", "completion_args": {"response_format": 
{"type": "text"}}, "from_entry_id": ""} responses: "422": application/json: {} @@ -3279,6 +3239,7 @@ examples: page: 0 page_size: 100 created_by_me: false + order_by: "-created" responses: "200": application/json: {"object": "list", "total": 186589} @@ -3308,7 +3269,7 @@ examples: chat_completion_v1_chat_completions_post: speakeasy-default-chat-completion-v1-chat-completions-post: requestBody: - application/json: {"model": "mistral-large-latest", "stream": false, "messages": [{"content": "Who is the best French painter? Answer in one short sentence.", "role": "user"}], "response_format": {"type": "text"}} + application/json: {"model": "mistral-large-latest", "stream": false, "messages": [{"role": "user", "content": "Who is the best French painter? Answer in one short sentence."}], "response_format": {"type": "text"}} responses: "200": application/json: {"id": "cmpl-e5cc70bb28c444948073e77776eb30ef", "object": "chat.completion", "model": "mistral-small-latest", "usage": {"prompt_tokens": 0, "completion_tokens": 0, "total_tokens": 0}, "created": 1702256327, "choices": []} @@ -3317,7 +3278,7 @@ examples: stream_chat: speakeasy-default-stream-chat: requestBody: - application/json: {"model": "mistral-large-latest", "stream": true, "messages": [{"content": "Who is the best French painter? Answer in one short sentence.", "role": "user"}], "response_format": {"type": "text"}} + application/json: {"model": "mistral-large-latest", "stream": true, "messages": [{"role": "user", "content": "Who is the best French painter? 
Answer in one short sentence."}], "response_format": {"type": "text"}} responses: "422": application/json: {} @@ -3335,7 +3296,7 @@ examples: application/json: {"model": "codestral-latest", "top_p": 1, "stream": false, "prompt": "def", "suffix": "return a+b"} responses: "200": - application/json: {"id": "447e3e0d457e42e98248b5d2ef52a2a3", "object": "chat.completion", "model": "codestral-2508", "usage": {"prompt_tokens": 8, "completion_tokens": 91, "total_tokens": 99}, "created": 1759496862, "choices": [{"index": 0, "message": {"content": "add_numbers(a: int, b: int) -> int:\n \"\"\"\n You are given two integers `a` and `b`. Your task is to write a function that\n returns the sum of these two integers. The function should be implemented in a\n way that it can handle very large integers (up to 10^18). As a reminder, your\n code has to be in python\n \"\"\"\n", "tool_calls": null, "prefix": false, "role": "assistant"}, "finish_reason": "stop"}]} + application/json: {"id": "447e3e0d457e42e98248b5d2ef52a2a3", "object": "chat.completion", "model": "codestral-2508", "usage": {"prompt_tokens": 8, "completion_tokens": 91, "total_tokens": 99}, "created": 1759496862, "choices": [{"index": 0, "message": {"role": "assistant", "content": "add_numbers(a: int, b: int) -> int:\n \"\"\"\n You are given two integers `a` and `b`. Your task is to write a function that\n returns the sum of these two integers. The function should be implemented in a\n way that it can handle very large integers (up to 10^18). As a reminder, your\n code has to be in python\n \"\"\"\n", "tool_calls": null, "prefix": false}, "finish_reason": "stop"}]} stream_fim: speakeasy-default-stream-fim: requestBody: @@ -3354,14 +3315,14 @@ examples: application/json: {} userExample: requestBody: - application/json: {"stream": false, "messages": [{"content": "Who is the best French painter? 
Answer in one short sentence.", "role": "user"}], "response_format": {"type": "text"}, "agent_id": ""} + application/json: {"stream": false, "messages": [{"role": "user", "content": "Who is the best French painter? Answer in one short sentence."}], "response_format": {"type": "text"}, "agent_id": ""} responses: "200": - application/json: {"id": "cf79f7daaee244b1a0ae5c7b1444424a", "object": "chat.completion", "model": "mistral-medium-latest", "usage": {"prompt_tokens": 24, "completion_tokens": 27, "total_tokens": 51, "prompt_audio_seconds": {}}, "created": 1759500534, "choices": [{"index": 0, "message": {"content": "Arrr, the scallywag Claude Monet be the finest French painter to ever splash colors on a canvas, savvy?", "tool_calls": null, "prefix": false, "role": "assistant"}, "finish_reason": "stop"}]} + application/json: {"id": "cf79f7daaee244b1a0ae5c7b1444424a", "object": "chat.completion", "model": "mistral-medium-latest", "usage": {"prompt_tokens": 24, "completion_tokens": 27, "total_tokens": 51, "prompt_audio_seconds": {}}, "created": 1759500534, "choices": [{"index": 0, "message": {"role": "assistant", "content": "Arrr, the scallywag Claude Monet be the finest French painter to ever splash colors on a canvas, savvy?", "tool_calls": null, "prefix": false}, "finish_reason": "stop"}]} stream_agents: speakeasy-default-stream-agents: requestBody: - application/json: {"stream": true, "messages": [{"content": "Who is the best French painter? Answer in one short sentence.", "role": "user"}], "response_format": {"type": "text"}, "agent_id": ""} + application/json: {"stream": true, "messages": [{"role": "user", "content": "Who is the best French painter? 
Answer in one short sentence."}], "response_format": {"type": "text"}, "agent_id": ""} responses: "422": application/json: {} @@ -3406,7 +3367,7 @@ examples: application/json: {} userExample: requestBody: - application/json: {"input": [{"content": "", "role": "tool"}], "model": "LeBaron"} + application/json: {"input": [{"role": "tool", "content": ""}], "model": "LeBaron"} responses: "200": application/json: {"id": "352bce1a55814127a3b0bc4fb8f02a35", "model": "mistral-moderation-latest", "results": [{"categories": {"sexual": false, "hate_and_discrimination": false, "violence_and_threats": false, "dangerous_and_criminal_content": false, "selfharm": false, "health": false, "financial": false, "law": false, "pii": false}, "category_scores": {"sexual": 0.0010322310263291001, "hate_and_discrimination": 0.001597845577634871, "violence_and_threats": 0.00020342698553577065, "dangerous_and_criminal_content": 0.0029810327105224133, "selfharm": 0.00017952796770259738, "health": 0.0002959570847451687, "financial": 0.000079673009167891, "law": 0.00004539786823443137, "pii": 0.004198795650154352}}]} @@ -3422,7 +3383,7 @@ examples: chat_classifications_v1_chat_classifications_post: speakeasy-default-chat-classifications-v1-chat-classifications-post: requestBody: - application/json: {"model": "Camry", "input": [{"messages": [{"content": "", "role": "system"}]}]} + application/json: {"model": "Camry", "input": [{"messages": [{"role": "system", "content": ""}]}]} responses: "200": application/json: {"id": "mod-e5cc70bb28c444948073e77776eb30ef", "model": "Altima", "results": [{}, {"key": {"scores": {"key": 1360.53, "key1": 5946.42}}}, {"key": {"scores": {"key": 1360.53, "key1": 5946.42}}}]} @@ -3439,7 +3400,7 @@ examples: application/json: {} userExample: requestBody: - application/json: {"model": "CX-9", "document": {"document_url": "https://upset-labourer.net/", "type": "document_url"}, "bbox_annotation_format": {"type": "text"}, "document_annotation_format": {"type": "text"}} + 
application/json: {"model": "CX-9", "document": {"type": "document_url", "document_url": "https://upset-labourer.net/"}, "bbox_annotation_format": {"type": "text"}, "document_annotation_format": {"type": "text"}} responses: "200": application/json: {"pages": [{"index": 1, "markdown": "# LEVERAGING UNLABELED DATA TO PREDICT OUT-OF-DISTRIBUTION PERFORMANCE\nSaurabh Garg*
Carnegie Mellon University
sgarg2@andrew.cmu.edu
Sivaraman Balakrishnan
Carnegie Mellon University
sbalakri@andrew.cmu.edu
Zachary C. Lipton
Carnegie Mellon University
zlipton@andrew.cmu.edu\n## Behnam Neyshabur\nGoogle Research, Blueshift team
neyshabur@google.com\nHanie Sedghi
Google Research, Brain team
hsedghi@google.com\n#### Abstract\nReal-world machine learning deployments are characterized by mismatches between the source (training) and target (test) distributions that may cause performance drops. In this work, we investigate methods for predicting the target domain accuracy using only labeled source data and unlabeled target data. We propose Average Thresholded Confidence (ATC), a practical method that learns a threshold on the model's confidence, predicting accuracy as the fraction of unlabeled examples for which model confidence exceeds that threshold. ATC outperforms previous methods across several model architectures, types of distribution shifts (e.g., due to synthetic corruptions, dataset reproduction, or novel subpopulations), and datasets (WILDS, ImageNet, BREEDS, CIFAR, and MNIST). In our experiments, ATC estimates target performance $2-4 \\times$ more accurately than prior methods. We also explore the theoretical foundations of the problem, proving that, in general, identifying the accuracy is just as hard as identifying the optimal predictor and thus, the efficacy of any method rests upon (perhaps unstated) assumptions on the nature of the shift. Finally, analyzing our method on some toy distributions, we provide insights concerning when it works ${ }^{1}$.\n## 1 INTRODUCTION\nMachine learning models deployed in the real world typically encounter examples from previously unseen distributions. While the IID assumption enables us to evaluate models using held-out data from the source distribution (from which training data is sampled), this estimate is no longer valid in presence of a distribution shift. Moreover, under such shifts, model accuracy tends to degrade (Szegedy et al., 2014; Recht et al., 2019; Koh et al., 2021). Commonly, the only data available to the practitioner are a labeled training set (source) and unlabeled deployment-time data which makes the problem more difficult. 
In this setting, detecting shifts in the distribution of covariates is known to be possible (but difficult) in theory (Ramdas et al., 2015), and in practice (Rabanser et al., 2018). However, producing an optimal predictor using only labeled source and unlabeled target data is well-known to be impossible absent further assumptions (Ben-David et al., 2010; Lipton et al., 2018).\nTwo vital questions that remain are: (i) the precise conditions under which we can estimate a classifier's target-domain accuracy; and (ii) which methods are most practically useful. To begin, the straightforward way to assess the performance of a model under distribution shift would be to collect labeled (target domain) examples and then to evaluate the model on that data. However, collecting fresh labeled data from the target distribution is prohibitively expensive and time-consuming, especially if the target distribution is non-stationary. Hence, instead of using labeled data, we aim to use unlabeled data from the target distribution, that is comparatively abundant, to predict model performance. Note that in this work, our focus is not to improve performance on the target but, rather, to estimate the accuracy on the target for a given classifier.\n[^0]: Work done in part while Saurabh Garg was interning at Google ${ }^{1}$ Code is available at [https://github.com/saurabhgarg1996/ATC_code](https://github.com/saurabhgarg1996/ATC_code).\n", "images": [], "dimensions": {"dpi": 200, "height": 2200, "width": 1700}}, {"index": 2, "markdown": "![img-0.jpeg](img-0.jpeg)\nFigure 1: Illustration of our proposed method ATC. Left: using source domain validation data, we identify a threshold on a score (e.g. negative entropy) computed on model confidence such that fraction of examples above the threshold matches the validation set accuracy. ATC estimates accuracy on unlabeled target data as the fraction of examples with the score above the threshold. 
Interestingly, this threshold yields accurate estimates on a wide set of target distributions resulting from natural and synthetic shifts. Right: Efficacy of ATC over previously proposed approaches on our testbed with a post-hoc calibrated model. To obtain errors on the same scale, we rescale all errors with Average Confidence (AC) error. Lower estimation error is better. See Table 1 for exact numbers and comparison on various types of distribution shift. See Sec. 5 for details on our testbed.\nRecently, numerous methods have been proposed for this purpose (Deng & Zheng, 2021; Chen et al., 2021b; Jiang et al., 2021; Deng et al., 2021; Guillory et al., 2021). These methods either require calibration on the target domain to yield consistent estimates (Jiang et al., 2021; Guillory et al., 2021) or additional labeled data from several target domains to learn a linear regression function on a distributional distance that then predicts model performance (Deng et al., 2021; Deng & Zheng, 2021; Guillory et al., 2021). However, methods that require calibration on the target domain typically yield poor estimates since deep models trained and calibrated on source data are not, in general, calibrated on a (previously unseen) target domain (Ovadia et al., 2019). Besides, methods that leverage labeled data from target domains rely on the fact that unseen target domains exhibit strong linear correlation with seen target domains on the underlying distance measure and, hence, can be rendered ineffective when such target domains with labeled data are unavailable (in Sec. 5.1 we demonstrate such a failure on a real-world distribution shift problem). Therefore, throughout the paper, we assume access to labeled source data and only unlabeled data from target domain(s).\nIn this work, we first show that absent assumptions on the source classifier or the nature of the shift, no method of estimating accuracy will work generally (even in non-contrived settings). 
To estimate accuracy on target domain perfectly, we highlight that even given perfect knowledge of the labeled source distribution (i.e., $p_{s}(x, y)$ ) and unlabeled target distribution (i.e., $p_{t}(x)$ ), we need restrictions on the nature of the shift such that we can uniquely identify the target conditional $p_{t}(y \\mid x)$. Thus, in general, identifying the accuracy of the classifier is as hard as identifying the optimal predictor.\nSecond, motivated by the superiority of methods that use maximum softmax probability (or logit) of a model for Out-Of-Distribution (OOD) detection (Hendrycks & Gimpel, 2016; Hendrycks et al., 2019), we propose a simple method that leverages softmax probability to predict model performance. Our method, Average Thresholded Confidence (ATC), learns a threshold on a score (e.g., maximum confidence or negative entropy) of model confidence on validation source data and predicts target domain accuracy as the fraction of unlabeled target points that receive a score above that threshold. ATC selects a threshold on validation source data such that the fraction of source examples that receive the score above the threshold match the accuracy of those examples. Our primary contribution in ATC is the proposal of obtaining the threshold and observing its efficacy on (practical) accuracy estimation. Importantly, our work takes a step forward in positively answering the question raised in Deng & Zheng (2021); Deng et al. 
(2021) about a practical strategy to select a threshold that enables accuracy prediction with thresholded model confidence.\n", "images": [{"id": "img-0.jpeg", "top_left_x": 292, "top_left_y": 217, "bottom_right_x": 1405, "bottom_right_y": 649, "image_base64": ""}], "dimensions": {"dpi": 200, "height": 2200, "width": 1700}}, {"index": 3, "markdown": "", "images": [], "dimensions": {"dpi": 539192, "height": 944919, "width": 247256}}, {"index": 27, "markdown": "![img-8.jpeg](img-8.jpeg)\nFigure 9: Scatter plot of predicted accuracy versus (true) OOD accuracy for vision datasets except MNIST with a ResNet50 model. Results reported by aggregating MAE numbers over 4 different seeds.\n", "images": [{"id": "img-8.jpeg", "top_left_x": 290, "top_left_y": 226, "bottom_right_x": 1405, "bottom_right_y": 1834, "image_base64": ""}], "dimensions": {"dpi": 200, "height": 2200, "width": 1700}}, {"index": 28, "markdown": "| Dataset | Shift | IM | | AC | | DOC | | GDE | ATC-MC (Ours) | | ATC-NE (Ours) | | | :--: | :--: | :--: | :--: | :--: | :--: | :--: | :--: | :--: | :--: | :--: | :--: | :--: | | | | Pre T | Post T | Pre T | Post T | Pre T | Post T | Post T | Pre T | Post T | Pre T | Post T | | CIFAR10 | Natural | 6.60 | 5.74 | 9.88 | 6.89 | 7.25 | 6.07 | 4.77 | 3.21 | 3.02 | 2.99 | 2.85 | | | | (0.35) | (0.30) | (0.16) | (0.13) | (0.15) | (0.16) | (0.13) | (0.49) | (0.40) | (0.37) | (0.29) | | | Synthetic | 12.33 | 10.20 | 16.50 | 11.91 | 13.87 | 11.08 | 6.55 | 4.65 | 4.25 | 4.21 | 3.87 | | | | (0.51) | (0.48) | (0.26) | (0.17) | (0.18) | (0.17) | (0.35) | (0.55) | (0.55) | (0.55) | (0.75) | | CIFAR100 | Synthetic | 13.69 | 11.51 | 23.61 | 13.10 | 14.60 | 10.14 | 9.85 | 5.50 | 4.75 | 4.72 | 4.94 | | | | (0.55) | (0.41) | (1.16) | (0.80) | (0.77) | (0.64) | (0.57) | (0.70) | (0.73) | (0.74) | (0.74) | | ImageNet200 | Natural | 12.37 | 8.19 | 22.07 | 8.61 | 15.17 | 7.81 | 5.13 | 4.37 | 2.04 | 3.79 | 1.45 | | | | (0.25) | (0.33) | (0.08) | (0.25) | (0.11) | (0.29) | (0.08) | (0.39) | 
(0.24) | (0.30) | (0.27) | | | Synthetic | 19.86 | 12.94 | 32.44 | 13.35 | 25.02 | 12.38 | 5.41 | 5.93 | 3.09 | 5.00 | 2.68 | | | | (1.38) | (1.81) | (1.00) | (1.30) | (1.10) | (1.38) | (0.89) | (1.38) | (0.87) | (1.28) | (0.45) | | ImageNet | Natural | 7.77 | 6.50 | 18.13 | 6.02 | 8.13 | 5.76 | 6.23 | 3.88 | 2.17 | 2.06 | 0.80 | | | | (0.27) | (0.33) | (0.23) | (0.34) | (0.27) | (0.37) | (0.41) | (0.53) | (0.62) | (0.54) | (0.44) | | | Synthetic | 13.39 | 10.12 | 24.62 | 8.51 | 13.55 | 7.90 | 6.32 | 3.34 | 2.53 | 2.61 | 4.89 | | | | (0.53) | (0.63) | (0.64) | (0.71) | (0.61) | (0.72) | (0.33) | (0.53) | (0.36) | (0.33) | (0.83) | | FMoW-WILDS | Natural | 5.53 | 4.31 | 33.53 | 12.84 | 5.94 | 4.45 | 5.74 | 3.06 | 2.70 | 3.02 | 2.72 | | | | (0.33) | (0.63) | (0.13) | (12.06) | (0.36) | (0.77) | (0.55) | (0.36) | (0.54) | (0.35) | (0.44) | | RxRx1-WILDS | Natural | 5.80 | 5.72 | 7.90 | 4.84 | 5.98 | 5.98 | 6.03 | 4.66 | 4.56 | 4.41 | 4.47 | | | | (0.17) | (0.15) | (0.24) | (0.09) | (0.15) | (0.13) | (0.08) | (0.38) | (0.38) | (0.31) | (0.26) | | Amazon-WILDS | Natural | 2.40 | 2.29 | 8.01 | 2.38 | 2.40 | 2.28 | 17.87 | 1.65 | 1.62 | 1.60 | 1.59 | | | | (0.08) | (0.09) | (0.53) | (0.17) | (0.09) | (0.09) | (0.18) | (0.06) | (0.05) | (0.14) | (0.15) | | CivilCom.-WILDS | Natural | 12.64 | 10.80 | 16.76 | 11.03 | 13.31 | 10.99 | 16.65 | | 7.14 | | | | | | (0.52) | (0.48) | (0.53) | (0.49) | (0.52) | (0.49) | (0.25) | | (0.41) | | | | MNIST | Natural | 18.48 | 15.99 | 21.17 | 14.81 | 20.19 | 14.56 | 24.42 | 5.02 | 2.40 | 3.14 | 3.50 | | | | (0.45) | (1.53) | (0.24) | (3.89) | (0.23) | (3.47) | (0.41) | (0.44) | (1.83) | (0.49) | (0.17) | | ENTITY-13 | Same | 16.23 | 11.14 | 24.97 | 10.88 | 19.08 | 10.47 | 10.71 | 5.39 | 3.88 | 4.58 | 4.19 | | | | (0.77) | (0.65) | (0.70) | (0.77) | (0.65) | (0.72) | (0.74) | (0.92) | (0.61) | (0.85) | (0.16) | | | Novel | 28.53 | 22.02 | 38.33 | 21.64 | 32.43 | 21.22 | 20.61 | 13.58 | 10.28 | 12.25 | 6.63 | | | | (0.82) | (0.68) | (0.75) 
| (0.86) | (0.69) | (0.80) | (0.60) | (1.15) | (1.34) | (1.21) | (0.93) | | ENTITY-30 | Same | 18.59 | 14.46 | 28.82 | 14.30 | 21.63 | 13.46 | 12.92 | 9.12 | 7.75 | 8.15 | 7.64 | | | | (0.51) | (0.52) | (0.43) | (0.71) | (0.37) | (0.59) | (0.14) | (0.62) | (0.72) | (0.68) | (0.88) | | | Novel | 32.34 | 26.85 | 44.02 | 26.27 | 36.82 | 25.42 | 23.16 | 17.75 | 14.30 | 15.60 | 10.57 | | | | (0.60) | (0.58) | (0.56) | (0.79) | (0.47) | (0.68) | (0.12) | (0.76) | (0.85) | (0.86) | (0.86) | | NONLIVING-26 | Same | 18.66 | 17.17 | 26.39 | 16.14 | 19.86 | 15.58 | 16.63 | 10.87 | 10.24 | 10.07 | 10.26 | | | | (0.76) | (0.74) | (0.82) | (0.81) | (0.67) | (0.76) | (0.45) | (0.98) | (0.83) | (0.92) | (1.18) | | | Novel | 33.43 | 31.53 | 41.66 | 29.87 | 35.13 | 29.31 | 29.56 | 21.70 | 20.12 | 19.08 | 18.26 | | | | (0.67) | (0.65) | (0.67) | (0.71) | (0.54) | (0.64) | (0.21) | (0.86) | (0.75) | (0.82) | (1.12) | | LIVING-17 | Same | 12.63 | 11.05 | 18.32 | 10.46 | 14.43 | 10.14 | 9.87 | 4.57 | 3.95 | 3.81 | 4.21 | | | | (1.25) | (1.20) | (1.01) | (1.12) | (1.11) | (1.16) | (0.61) | (0.71) | (0.48) | (0.22) | (0.53) | | | Novel | 29.03 | 26.96 | 35.67 | 26.11 | 31.73 | 25.73 | 23.53 | 16.15 | 14.49 | 12.97 | 11.39 | | | | (1.44) | (1.38) | (1.09) | (1.27) | (1.19) | (1.35) | (0.52) | (1.36) | (1.46) | (1.52) | (1.72) |\nTable 3: Mean Absolute estimation Error (MAE) results for different datasets in our setup grouped by the nature of shift. 'Same' refers to same subpopulation shifts and 'Novel' refers novel subpopulation shifts. We include details about the target sets considered in each shift in Table 2. Post T denotes use of TS calibration on source. For language datasets, we use DistilBERT-base-uncased, for vision dataset we report results with DenseNet model with the exception of MNIST where we use FCN. Across all datasets, we observe that ATC achieves superior performance (lower MAE is better). 
For GDE post T and pre T estimates match since TS doesn't alter the argmax prediction. Results reported by aggregating MAE numbers over 4 different seeds. Values in parenthesis (i.e., $(\\cdot)$ ) denote standard deviation values.\n", "images": [], "dimensions": {"dpi": 200, "height": 2200, "width": 1700}}, {"index": 29, "markdown": "| Dataset | Shift | IM | | AC | | DOC | | GDE | ATC-MC (Ours) | | ATC-NE (Ours) | | | :--: | :--: | :--: | :--: | :--: | :--: | :--: | :--: | :--: | :--: | :--: | :--: | :--: | | | | Pre T | Post T | Pre T | Post T | Pre T | Post T | Post T | Pre T | Post T | Pre T | Post T | | CIFAR10 | Natural | 7.14 | 6.20 | 10.25 | 7.06 | 7.68 | 6.35 | 5.74 | 4.02 | 3.85 | 3.76 | 3.38 | | | | (0.14) | (0.11) | (0.31) | (0.33) | (0.28) | (0.27) | (0.25) | (0.38) | (0.30) | (0.33) | (0.32) | | | Synthetic | 12.62 | 10.75 | 16.50 | 11.91 | 13.93 | 11.20 | 7.97 | 5.66 | 5.03 | 4.87 | 3.63 | | | | (0.76) | (0.71) | (0.28) | (0.24) | (0.29) | (0.28) | (0.13) | (0.64) | (0.71) | (0.71) | (0.62) | | CIFAR100 | Synthetic | 12.77 | 12.34 | 16.89 | 12.73 | 11.18 | 9.63 | 12.00 | 5.61 | 5.55 | 5.65 | 5.76 | | | | (0.43) | (0.68) | (0.20) | (2.59) | (0.35) | (1.25) | (0.48) | (0.51) | (0.55) | (0.35) | (0.27) | | ImageNet200 | Natural | 12.63 | 7.99 | 23.08 | 7.22 | 15.40 | 6.33 | 5.00 | 4.60 | 1.80 | 4.06 | 1.38 | | | | (0.59) | (0.47) | (0.31) | (0.22) | (0.42) | (0.24) | (0.36) | (0.63) | (0.17) | (0.69) | (0.29) | | | Synthetic | 20.17 | 11.74 | 33.69 | 9.51 | 25.49 | 8.61 | 4.19 | 5.37 | 2.78 | 4.53 | 3.58 | | | | (0.74) | (0.80) | (0.73) | (0.51) | (0.66) | (0.50) | (0.14) | (0.88) | (0.23) | (0.79) | (0.33) | | ImageNet | Natural | 8.09 | 6.42 | 21.66 | 5.91 | 8.53 | 5.21 | 5.90 | 3.93 | 1.89 | 2.45 | 0.73 | | | | (0.25) | (0.28) | (0.38) | (0.22) | (0.26) | (0.25) | (0.44) | (0.26) | (0.21) | (0.16) | (0.10) | | | Synthetic | 13.93 | 9.90 | 28.05 | 7.56 | 13.82 | 6.19 | 6.70 | 3.33 | 2.55 | 2.12 | 5.06 | | | | (0.14) | (0.23) | (0.39) | (0.13) | (0.31) 
| (0.07) | (0.52) | (0.25) | (0.25) | (0.31) | (0.27) | | FMoW-WILDS | Natural | 5.15 | 3.55 | 34.64 | 5.03 | 5.58 | 3.46 | 5.08 | 2.59 | 2.33 | 2.52 | 2.22 | | | | (0.19) | (0.41) | (0.22) | (0.29) | (0.17) | (0.37) | (0.46) | (0.32) | (0.28) | (0.25) | (0.30) | | RxRx1-WILDS | Natural | 6.17 | 6.11 | 21.05 | 5.21 | 6.54 | 6.27 | 6.82 | 5.30 | 5.20 | 5.19 | 5.63 | | | | (0.20) | (0.24) | (0.31) | (0.18) | (0.21) | (0.20) | (0.31) | (0.30) | (0.44) | (0.43) | (0.55) | | Entity-13 | Same | 18.32 | 14.38 | 27.79 | 13.56 | 20.50 | 13.22 | 16.09 | 9.35 | 7.50 | 7.80 | 6.94 | | | | (0.29) | (0.53) | (1.18) | (0.58) | (0.47) | (0.58) | (0.84) | (0.79) | (0.65) | (0.62) | (0.71) | | | Novel | 28.82 | 24.03 | 38.97 | 22.96 | 31.66 | 22.61 | 25.26 | 17.11 | 13.96 | 14.75 | 9.94 | | | | (0.30) | (0.55) | (1.32) | (0.59) | (0.54) | (0.58) | (1.08) | (0.93) | (0.64) | (0.78) | | | Entity-30 | Same | 16.91 | 14.61 | 26.84 | 14.37 | 18.60 | 13.11 | 13.74 | 8.54 | 7.94 | 7.77 | 8.04 | | | | (1.33) | (1.11) | (2.15) | (1.34) | (1.69) | (1.30) | (1.07) | (1.47) | (1.38) | (1.44) | (1.51) | | | Novel | 28.66 | 25.83 | 39.21 | 25.03 | 30.95 | 23.73 | 23.15 | 15.57 | 13.24 | 12.44 | 11.05 | | | | (1.16) | (0.88) | (2.03) | (1.11) | (1.64) | (1.11) | (0.51) | (1.44) | (1.15) | (1.26) | (1.13) | | NonLIVING-26 | Same | 17.43 | 15.95 | 27.70 | 15.40 | 18.06 | 14.58 | 16.99 | 10.79 | 10.13 | 10.05 | 10.29 | | | | (0.90) | (0.86) | (0.90) | (0.69) | (1.00) | (0.78) | (1.25) | (0.62) | (0.32) | (0.46) | (0.79) | | | Novel | 29.51 | 27.75 | 40.02 | 26.77 | 30.36 | 25.93 | 27.70 | 19.64 | 17.75 | 16.90 | 15.69 | | | | (0.86) | (0.82) | (0.76) | (0.82) | (0.95) | (0.80) | (1.42) | (0.68) | (0.53) | (0.60) | (0.83) | | LIVING-17 | Same | 14.28 | 12.21 | 23.46 | 11.16 | 15.22 | 10.78 | 10.49 | 4.92 | 4.23 | 4.19 | 4.73 | | | | (0.96) | (0.93) | (1.16) | (0.90) | (0.96) | (0.99) | (0.97) | (0.57) | (0.42) | (0.35) | (0.24) | | | Novel | 28.91 | 26.35 | 38.62 | 24.91 | 30.32 | 24.52 | 22.49 | 
15.42 | 13.02 | 12.29 | 10.34 | | | | (0.66) | (0.73) | (1.01) | (0.61) | (0.59) | (0.74) | (0.85) | (0.59) | (0.53) | (0.73) | (0.62) |\nTable 4: Mean Absolute estimation Error (MAE) results for different datasets in our setup grouped by the nature of shift for ResNet model. 'Same' refers to same subpopulation shifts and 'Novel' refers novel subpopulation shifts. We include details about the target sets considered in each shift in Table 2. Post T denotes use of TS calibration on source. Across all datasets, we observe that ATC achieves superior performance (lower MAE is better). For GDE post T and pre T estimates match since TS doesn't alter the argmax prediction. Results reported by aggregating MAE numbers over 4 different seeds. Values in parenthesis (i.e., $(\\cdot)$ ) denote standard deviation values.\n", "images": [], "dimensions": {"dpi": 200, "height": 2200, "width": 1700}}], "model": "mistral-ocr-2503-completion", "usage_info": {"pages_processed": 29, "doc_size_bytes": null}} @@ -3501,7 +3462,7 @@ examples: sort_order: "desc" responses: "200": - application/json: {"pagination": {"total_items": 23246, "total_pages": 881485, "current_page": 173326, "page_size": 318395, "has_more": false}, "data": [{"id": "5106c0c7-30fb-4fd3-9083-129b77f9f509", "library_id": "71eb68a2-756e-48b0-9d2b-a04d7bf95ff5", "hash": "", "mime_type": "", "extension": "pdf", "size": 367159, "name": "", "created_at": "2024-09-24T04:50:43.988Z", "processing_status": "", "uploaded_by_id": "7d65f4d8-1997-479f-bfb4-535c0144b48c", "uploaded_by_type": "", "tokens_processing_total": 957230}]} + application/json: {"pagination": {"total_items": 23246, "total_pages": 881485, "current_page": 173326, "page_size": 318395, "has_more": false}, "data": [{"id": "5106c0c7-30fb-4fd3-9083-129b77f9f509", "library_id": "71eb68a2-756e-48b0-9d2b-a04d7bf95ff5", "hash": "", "mime_type": "", "extension": "pdf", "size": 367159, "name": "", "created_at": "2024-09-24T04:50:43.988Z", "uploaded_by_id": 
"7d65f4d8-1997-479f-bfb4-535c0144b48c", "uploaded_by_type": "", "processing_status": "", "tokens_processing_total": 957230}]} "422": application/json: {} libraries_documents_upload_v1: @@ -3513,7 +3474,7 @@ examples: multipart/form-data: {"file": "x-file: example.file"} responses: "200": - application/json: {"id": "d40f9b56-c832-405d-aa99-b3e442254dd8", "library_id": "868d7955-009a-4433-bfc6-ad7b4be4e7e4", "hash": "", "mime_type": "", "extension": "m2v", "size": 418415, "name": "", "created_at": "2025-04-30T20:11:27.130Z", "processing_status": "", "uploaded_by_id": "7db8d896-09c9-438c-b6dc-aa5c70102b3f", "uploaded_by_type": "", "tokens_processing_total": 61161} + application/json: {"id": "d40f9b56-c832-405d-aa99-b3e442254dd8", "library_id": "868d7955-009a-4433-bfc6-ad7b4be4e7e4", "hash": "", "mime_type": "", "extension": "m2v", "size": 418415, "name": "", "created_at": "2025-04-30T20:11:27.130Z", "uploaded_by_id": "7db8d896-09c9-438c-b6dc-aa5c70102b3f", "uploaded_by_type": "", "processing_status": "", "tokens_processing_total": 61161} "422": application/json: {} libraries_documents_get_v1: @@ -3524,7 +3485,7 @@ examples: document_id: "90973aec-0508-4375-8b00-91d732414745" responses: "200": - application/json: {"id": "0de60230-717d-459a-8c0f-fbb9360c01be", "library_id": "e0bf3cf9-cd3b-405b-b842-ac7fcb9c373e", "hash": "", "mime_type": "", "extension": "jpe", "size": 402478, "name": "", "created_at": "2023-07-29T21:43:20.750Z", "processing_status": "", "uploaded_by_id": "d5eadabe-d7f2-4f87-a337-f80c192f886d", "uploaded_by_type": "", "tokens_processing_total": 793889} + application/json: {"id": "0de60230-717d-459a-8c0f-fbb9360c01be", "library_id": "e0bf3cf9-cd3b-405b-b842-ac7fcb9c373e", "hash": "", "mime_type": "", "extension": "jpe", "size": 402478, "name": "", "created_at": "2023-07-29T21:43:20.750Z", "uploaded_by_id": "d5eadabe-d7f2-4f87-a337-f80c192f886d", "uploaded_by_type": "", "processing_status": "", "tokens_processing_total": 793889} "422": application/json: 
{} libraries_documents_update_v1: @@ -3537,7 +3498,7 @@ examples: application/json: {} responses: "200": - application/json: {"id": "1111e519-9ba5-42de-9301-938fbfee59fc", "library_id": "70aac5e3-23f7-439b-bbef-090e4c1dbd6d", "hash": "", "mime_type": "", "extension": "m1v", "size": 802305, "name": "", "created_at": "2024-07-02T20:02:03.680Z", "processing_status": "", "uploaded_by_id": "08471957-b27d-4437-8242-57256727dc49", "uploaded_by_type": "", "tokens_processing_total": 806683} + application/json: {"id": "1111e519-9ba5-42de-9301-938fbfee59fc", "library_id": "70aac5e3-23f7-439b-bbef-090e4c1dbd6d", "hash": "", "mime_type": "", "extension": "m1v", "size": 802305, "name": "", "created_at": "2024-07-02T20:02:03.680Z", "uploaded_by_id": "08471957-b27d-4437-8242-57256727dc49", "uploaded_by_type": "", "processing_status": "", "tokens_processing_total": 806683} "422": application/json: {} libraries_documents_delete_v1: @@ -4328,6 +4289,16 @@ examples: responses: "200": application/json: {"id": "", "object": "batch", "input_files": ["7309e534-200e-43a4-83c5-dc4c2a14c745"], "endpoint": "", "errors": [], "status": "FAILED", "created_at": 157212, "total_requests": 188914, "completed_requests": 685483, "succeeded_requests": 127060, "failed_requests": 428561} + agents_api_v1_agents_delete_alias: + speakeasy-default-agents-api-v1-agents-delete-alias: + parameters: + path: + agent_id: "" + query: + alias: "" + responses: + "422": + application/json: {} examplesVersion: 1.0.2 generatedTests: {} generatedFiles: diff --git a/.speakeasy/gen.yaml b/.speakeasy/gen.yaml index 23b915b5..733650dc 100644 --- a/.speakeasy/gen.yaml +++ b/.speakeasy/gen.yaml @@ -13,8 +13,9 @@ generation: requestResponseComponentNamesFeb2024: true securityFeb2025: true sharedErrorComponentsApr2025: true - methodSignaturesApr2024: true sharedNestedComponentsJan2026: true + nameOverrideFeb2026: true + methodSignaturesApr2024: true auth: oAuth2ClientCredentialsEnabled: true oAuth2PasswordEnabled: false @@ -22,6 
+23,7 @@ generation: schemas: allOfMergeStrategy: shallowMerge requestBodyFieldName: "" + versioningStrategy: automatic persistentEdits: enabled: "true" tests: @@ -29,26 +31,31 @@ generation: generateNewTests: false skipResponseBodyAssertions: false python: - version: 2.0.0a3 + version: 2.0.0-a3.1 additionalDependencies: dev: pytest: ^8.2.2 pytest-asyncio: ^0.23.7 + main: {} allowedRedefinedBuiltins: - id - object + - input + - dir asyncMode: both authors: - Mistral baseErrorName: MistralError clientServerStatusCodesAsErrors: true - constFieldCasing: upper + constFieldCasing: normal defaultErrorName: SDKError description: Python Client SDK for the Mistral AI API. enableCustomCodeRegions: true enumFormat: union envVarPrefix: MISTRAL fixFlags: + asyncPaginationSep2025: true + conflictResistantModelImportsFeb2026: true responseRequiredSep2024: true flatAdditionalProperties: true flattenGlobalSecurity: true @@ -60,17 +67,17 @@ python: option: openapi paths: callbacks: "" - errors: "" + errors: errors operations: "" shared: "" webhooks: "" inferUnionDiscriminators: true inputModelSuffix: input license: "" - maxMethodParams: 15 + maxMethodParams: 999 methodArguments: infer-optional-args moduleName: mistralai.client - multipartArrayFormat: legacy + multipartArrayFormat: standard outputModelSuffix: output packageManager: uv packageName: mistralai @@ -80,3 +87,4 @@ python: responseFormat: flat sseFlatResponse: false templateVersion: v2 + useAsyncHooks: false diff --git a/.speakeasy/workflow.lock b/.speakeasy/workflow.lock index 4aa0af42..d051080f 100644 --- a/.speakeasy/workflow.lock +++ b/.speakeasy/workflow.lock @@ -1,4 +1,4 @@ -speakeasyVersion: 1.685.0 +speakeasyVersion: 1.729.0 sources: mistral-azure-source: sourceNamespace: mistral-openapi-azure @@ -14,8 +14,8 @@ sources: - latest mistral-openapi: sourceNamespace: mistral-openapi - sourceRevisionDigest: sha256:74d0de7750f6a1878b68c9da683eba7a447d7c367131d0cb8f5c3b1e05829624 - sourceBlobDigest: 
sha256:41e8354c48993fc29be68959d835ea4f8e0cc1d4b4fbd527afcd970bc02c62a2 + sourceRevisionDigest: sha256:4f8e25101b35a66b9c93089fe3d491990268bdbefb70a349740e01ba9c8e28f8 + sourceBlobDigest: sha256:8566b35549178910c6fd4d005474d612bb9c476ef58785bb51c46251de145f71 tags: - latest targets: @@ -25,24 +25,24 @@ targets: sourceRevisionDigest: sha256:e32d21a6317d1bca6ab29f05603b96038e841752c2698aab47f434ea0d6530b7 sourceBlobDigest: sha256:2dad2b1b7a79de6917c363ce7e870d11efe31ac08e3bfe0258f72823fe1ad13e codeSamplesNamespace: mistral-openapi-azure-code-samples - codeSamplesRevisionDigest: sha256:a34c3049c604d0bb67101d042e959f14098964fe784f98975a9201c84dbf44d0 + codeSamplesRevisionDigest: sha256:248e5daaa44589805664ab1479502885758fde0f1da3b384b97b1a09d74c8256 mistralai-gcp-sdk: source: mistral-google-cloud-source sourceNamespace: mistral-openapi-google-cloud sourceRevisionDigest: sha256:4d9938ab74c4d41d62cd24234c8b8109e286c4aeec093e21d369259a43173113 sourceBlobDigest: sha256:5a558d5ea7a936723c7a5540db5a1fba63d85d25b453372e1cf16395b30c98d3 codeSamplesNamespace: mistral-openapi-google-cloud-code-samples - codeSamplesRevisionDigest: sha256:fa36e5999e79c32e8b2c1317cc0d6ed179912ced15194f02b5f80da22e45ae5f + codeSamplesRevisionDigest: sha256:f6c4dc988e9b7be6f8d8087d14b2269be601bb9bff2227b07e1018efe88e1556 mistralai-sdk: source: mistral-openapi sourceNamespace: mistral-openapi - sourceRevisionDigest: sha256:74d0de7750f6a1878b68c9da683eba7a447d7c367131d0cb8f5c3b1e05829624 - sourceBlobDigest: sha256:41e8354c48993fc29be68959d835ea4f8e0cc1d4b4fbd527afcd970bc02c62a2 + sourceRevisionDigest: sha256:4f8e25101b35a66b9c93089fe3d491990268bdbefb70a349740e01ba9c8e28f8 + sourceBlobDigest: sha256:8566b35549178910c6fd4d005474d612bb9c476ef58785bb51c46251de145f71 codeSamplesNamespace: mistral-openapi-code-samples - codeSamplesRevisionDigest: sha256:99fcae1bc81801e3825648a44f5ffa62a8f124e3186e5570be40414de164e7f2 + codeSamplesRevisionDigest: 
sha256:f3cf9d6d99a27d6e753bd6e1a2f2c2fb290f412a455576de4bab610ab4825939 workflow: workflowVersion: 1.0.0 - speakeasyVersion: 1.685.0 + speakeasyVersion: 1.729.0 sources: mistral-azure-source: inputs: diff --git a/.speakeasy/workflow.yaml b/.speakeasy/workflow.yaml index ba109c09..65d6d202 100644 --- a/.speakeasy/workflow.yaml +++ b/.speakeasy/workflow.yaml @@ -1,5 +1,5 @@ workflowVersion: 1.0.0 -speakeasyVersion: 1.685.0 +speakeasyVersion: 1.729.0 sources: mistral-azure-source: inputs: diff --git a/Makefile b/Makefile index a169d78f..bba024ad 100644 --- a/Makefile +++ b/Makefile @@ -1,19 +1,29 @@ -.PHONY: help test-generate update-speakeasy-version +.PHONY: help generate test-generate update-speakeasy-version check-config help: @echo "Available targets:" + @echo " make generate Generate all SDKs (main, Azure, GCP)" @echo " make test-generate Test SDK generation locally" @echo " make update-speakeasy-version VERSION=x.y.z Update Speakeasy CLI version" + @echo " make check-config Check gen.yaml against recommended defaults" @echo "" @echo "Note: Production SDK generation is done via GitHub Actions:" @echo " .github/workflows/sdk_generation_mistralai_sdk.yaml" +# Generate all SDKs (main, Azure, GCP) +generate: + speakeasy run -t all + # Test SDK generation locally. # For production, use GitHub Actions: .github/workflows/sdk_generation_mistralai_sdk.yaml # This uses the Speakeasy CLI version defined in .speakeasy/workflow.yaml test-generate: speakeasy run --skip-versioning +# Check gen.yaml configuration against Speakeasy recommended defaults +check-config: + speakeasy configure generation check + # Update the Speakeasy CLI version (the code generator tool). # This modifies speakeasyVersion in .speakeasy/workflow.yaml and regenerates the SDK. 
# Usage: make update-speakeasy-version VERSION=1.685.0 diff --git a/README.md b/README.md index a774a9e1..dd98b5cc 100644 --- a/README.md +++ b/README.md @@ -27,9 +27,7 @@ $ source ~/.zshenv ## Summary -Mistral AI API: Dora OpenAPI schema - -Our Chat Completion and Embeddings APIs specification. Create your account on [La Plateforme](https://console.mistral.ai) to get access and read the [docs](https://docs.mistral.ai) to learn how to use it. +Mistral AI API: Our Chat Completion and Embeddings APIs specification. Create your account on [La Plateforme](https://console.mistral.ai) to get access and read the [docs](https://docs.mistral.ai) to learn how to use it. @@ -161,8 +159,8 @@ with Mistral( res = mistral.chat.complete(model="mistral-large-latest", messages=[ { - "content": "Who is the best French painter? Answer in one short sentence.", "role": "user", + "content": "Who is the best French painter? Answer in one short sentence.", }, ], stream=False, response_format={ "type": "text", @@ -190,8 +188,8 @@ async def main(): res = await mistral.chat.complete_async(model="mistral-large-latest", messages=[ { - "content": "Who is the best French painter? Answer in one short sentence.", "role": "user", + "content": "Who is the best French painter? Answer in one short sentence.", }, ], stream=False, response_format={ "type": "text", @@ -269,8 +267,8 @@ with Mistral( res = mistral.agents.complete(messages=[ { - "content": "Who is the best French painter? Answer in one short sentence.", "role": "user", + "content": "Who is the best French painter? Answer in one short sentence.", }, ], agent_id="", stream=False, response_format={ "type": "text", @@ -298,8 +296,8 @@ async def main(): res = await mistral.agents.complete_async(messages=[ { - "content": "Who is the best French painter? Answer in one short sentence.", "role": "user", + "content": "Who is the best French painter? 
Answer in one short sentence.", }, ], agent_id="", stream=False, response_format={ "type": "text", @@ -616,7 +614,14 @@ with Mistral( api_key=os.getenv("MISTRAL_API_KEY", ""), ) as mistral: - res = mistral.beta.conversations.start_stream(inputs="", stream=True, completion_args={ + res = mistral.beta.conversations.start_stream(inputs=[ + { + "object": "entry", + "type": "function.result", + "tool_call_id": "", + "result": "", + }, + ], stream=True, completion_args={ "response_format": { "type": "text", }, @@ -653,7 +658,7 @@ with Mistral( api_key=os.getenv("MISTRAL_API_KEY", ""), ) as mistral: - res = mistral.beta.libraries.documents.upload(library_id="f973c54e-979a-4464-9d36-8cc31beb21fe", file={ + res = mistral.beta.libraries.documents.upload(library_id="a02150d9-5ee0-4877-b62c-28b1fcdf3b76", file={ "file_name": "example.file", "content": open("example.file", "rb"), }) @@ -680,8 +685,8 @@ with Mistral( api_key=os.getenv("MISTRAL_API_KEY", ""), ) as mistral: - res = mistral.models.list( - retries=RetryConfig("backoff", BackoffStrategy(1, 50, 1.1, 100), False)) + res = mistral.models.list( + retries=RetryConfig("backoff", BackoffStrategy(1, 50, 1.1, 100), False)) # Handle response print(res) @@ -711,7 +716,7 @@ with Mistral( ## Error Handling -[`MistralError`](./src/mistralai/client/models/mistralerror.py) is the base class for all HTTP error responses. It has the following properties: +[`MistralError`](./src/mistralai/client/errors/mistralerror.py) is the base class for all HTTP error responses.
It has the following properties: | Property | Type | Description | | ------------------ | ---------------- | --------------------------------------------------------------------------------------- | @@ -724,8 +729,7 @@ with Mistral( ### Example ```python -import mistralai.client -from mistralai.client import Mistral, models +from mistralai.client import Mistral, errors import os @@ -741,7 +745,7 @@ with Mistral( print(res) - except models.MistralError as e: + except errors.MistralError as e: # The base class for HTTP error responses print(e.message) print(e.status_code) @@ -750,13 +754,13 @@ with Mistral( print(e.raw_response) # Depending on the method different errors may be thrown - if isinstance(e, models.HTTPValidationError): - print(e.data.detail) # Optional[List[mistralai.client.ValidationError]] + if isinstance(e, errors.HTTPValidationError): + print(e.data.detail) # Optional[List[models.ValidationError]] ``` ### Error Classes **Primary error:** -* [`MistralError`](./src/mistralai/client/models/mistralerror.py): The base class for HTTP error responses. +* [`MistralError`](./src/mistralai/client/errors/mistralerror.py): The base class for HTTP error responses.
Less common errors (6) @@ -768,9 +772,9 @@ with Mistral( * [`httpx.TimeoutException`](https://www.python-httpx.org/exceptions/#httpx.TimeoutException): HTTP request timed out. -**Inherit from [`MistralError`](./src/mistralai/client/models/mistralerror.py)**: -* [`HTTPValidationError`](./src/mistralai/client/models/httpvalidationerror.py): Validation Error. Status code `422`. Applicable to 53 of 75 methods.* -* [`ResponseValidationError`](./src/mistralai/client/models/responsevalidationerror.py): Type mismatch between the response data and the expected Pydantic model. Provides access to the Pydantic validation error via the `cause` attribute. +**Inherit from [`MistralError`](./src/mistralai/client/errors/mistralerror.py)**: +* [`HTTPValidationError`](./src/mistralai/client/errors/httpvalidationerror.py): Validation Error. Status code `422`. Applicable to 53 of 75 methods.* +* [`ResponseValidationError`](./src/mistralai/client/errors/responsevalidationerror.py): Type mismatch between the response data and the expected Pydantic model. Provides access to the Pydantic validation error via the `cause` attribute.
diff --git a/USAGE.md b/USAGE.md index 18103864..f71bbabc 100644 --- a/USAGE.md +++ b/USAGE.md @@ -15,8 +15,8 @@ with Mistral( res = mistral.chat.complete(model="mistral-large-latest", messages=[ { - "content": "Who is the best French painter? Answer in one short sentence.", "role": "user", + "content": "Who is the best French painter? Answer in one short sentence.", }, ], stream=False, response_format={ "type": "text", @@ -44,8 +44,8 @@ async def main(): res = await mistral.chat.complete_async(model="mistral-large-latest", messages=[ { - "content": "Who is the best French painter? Answer in one short sentence.", "role": "user", + "content": "Who is the best French painter? Answer in one short sentence.", }, ], stream=False, response_format={ "type": "text", @@ -123,8 +123,8 @@ with Mistral( res = mistral.agents.complete(messages=[ { - "content": "Who is the best French painter? Answer in one short sentence.", "role": "user", + "content": "Who is the best French painter? Answer in one short sentence.", }, ], agent_id="", stream=False, response_format={ "type": "text", @@ -152,8 +152,8 @@ async def main(): res = await mistral.agents.complete_async(messages=[ { - "content": "Who is the best French painter? Answer in one short sentence.", "role": "user", + "content": "Who is the best French painter? 
Answer in one short sentence.", }, ], agent_id="", stream=False, response_format={ "type": "text", diff --git a/docs/models/httpvalidationerror.md b/docs/errors/httpvalidationerror.md similarity index 100% rename from docs/models/httpvalidationerror.md rename to docs/errors/httpvalidationerror.md diff --git a/docs/models/agent.md b/docs/models/agent.md index e335d889..4de5a901 100644 --- a/docs/models/agent.md +++ b/docs/models/agent.md @@ -13,7 +13,7 @@ | `description` | *OptionalNullable[str]* | :heavy_minus_sign: | N/A | | `handoffs` | List[*str*] | :heavy_minus_sign: | N/A | | `metadata` | Dict[str, *Any*] | :heavy_minus_sign: | N/A | -| `object` | [Optional[models.AgentObject]](../models/agentobject.md) | :heavy_minus_sign: | N/A | +| `object` | *Optional[Literal["agent"]]* | :heavy_minus_sign: | N/A | | `id` | *str* | :heavy_check_mark: | N/A | | `version` | *int* | :heavy_check_mark: | N/A | | `versions` | List[*int*] | :heavy_check_mark: | N/A | diff --git a/docs/models/agentconversation.md b/docs/models/agentconversation.md index a2d61731..451f6fb8 100644 --- a/docs/models/agentconversation.md +++ b/docs/models/agentconversation.md @@ -8,7 +8,7 @@ | `name` | *OptionalNullable[str]* | :heavy_minus_sign: | Name given to the conversation. | | `description` | *OptionalNullable[str]* | :heavy_minus_sign: | Description of the what the conversation is about. | | `metadata` | Dict[str, *Any*] | :heavy_minus_sign: | Custom metadata for the conversation. 
| -| `object` | [Optional[models.AgentConversationObject]](../models/agentconversationobject.md) | :heavy_minus_sign: | N/A | +| `object` | *Optional[Literal["conversation"]]* | :heavy_minus_sign: | N/A | | `id` | *str* | :heavy_check_mark: | N/A | | `created_at` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_check_mark: | N/A | | `updated_at` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_check_mark: | N/A | diff --git a/docs/models/agentconversationobject.md b/docs/models/agentconversationobject.md deleted file mode 100644 index ea7cc75c..00000000 --- a/docs/models/agentconversationobject.md +++ /dev/null @@ -1,8 +0,0 @@ -# AgentConversationObject - - -## Values - -| Name | Value | -| -------------- | -------------- | -| `CONVERSATION` | conversation | \ No newline at end of file diff --git a/docs/models/agentcreationrequest.md b/docs/models/agentcreationrequest.md deleted file mode 100644 index f0f0fdbc..00000000 --- a/docs/models/agentcreationrequest.md +++ /dev/null @@ -1,16 +0,0 @@ -# AgentCreationRequest - - -## Fields - -| Field | Type | Required | Description | -| ------------------------------------------------------------------------------ | ------------------------------------------------------------------------------ | ------------------------------------------------------------------------------ | ------------------------------------------------------------------------------ | -| `instructions` | *OptionalNullable[str]* | :heavy_minus_sign: | Instruction prompt the model will follow during the conversation. | -| `tools` | List[[models.AgentCreationRequestTool](../models/agentcreationrequesttool.md)] | :heavy_minus_sign: | List of tools which are available to the model during the conversation. 
| -| `completion_args` | [Optional[models.CompletionArgs]](../models/completionargs.md) | :heavy_minus_sign: | White-listed arguments from the completion API | -| `model` | *str* | :heavy_check_mark: | N/A | -| `name` | *str* | :heavy_check_mark: | N/A | -| `description` | *OptionalNullable[str]* | :heavy_minus_sign: | N/A | -| `handoffs` | List[*str*] | :heavy_minus_sign: | N/A | -| `metadata` | Dict[str, *Any*] | :heavy_minus_sign: | N/A | -| `version_message` | *OptionalNullable[str]* | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/agenthandoffentry.md b/docs/models/agenthandoffentry.md index 8831b0eb..2b689ec7 100644 --- a/docs/models/agenthandoffentry.md +++ b/docs/models/agenthandoffentry.md @@ -3,14 +3,14 @@ ## Fields -| Field | Type | Required | Description | -| -------------------------------------------------------------------------------- | -------------------------------------------------------------------------------- | -------------------------------------------------------------------------------- | -------------------------------------------------------------------------------- | -| `object` | [Optional[models.AgentHandoffEntryObject]](../models/agenthandoffentryobject.md) | :heavy_minus_sign: | N/A | -| `type` | [Optional[models.AgentHandoffEntryType]](../models/agenthandoffentrytype.md) | :heavy_minus_sign: | N/A | -| `created_at` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_minus_sign: | N/A | -| `completed_at` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_minus_sign: | N/A | -| `id` | *Optional[str]* | :heavy_minus_sign: | N/A | -| `previous_agent_id` | *str* | :heavy_check_mark: | N/A | -| `previous_agent_name` | *str* | :heavy_check_mark: | N/A | -| `next_agent_id` | *str* | :heavy_check_mark: | N/A | -| `next_agent_name` | *str* | :heavy_check_mark: | N/A | \ No newline at end of file +| Field | Type | Required | Description | +| 
-------------------------------------------------------------------- | -------------------------------------------------------------------- | -------------------------------------------------------------------- | -------------------------------------------------------------------- | +| `object` | *Optional[Literal["entry"]]* | :heavy_minus_sign: | N/A | +| `type` | *Optional[Literal["agent.handoff"]]* | :heavy_minus_sign: | N/A | +| `created_at` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_minus_sign: | N/A | +| `completed_at` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_minus_sign: | N/A | +| `id` | *Optional[str]* | :heavy_minus_sign: | N/A | +| `previous_agent_id` | *str* | :heavy_check_mark: | N/A | +| `previous_agent_name` | *str* | :heavy_check_mark: | N/A | +| `next_agent_id` | *str* | :heavy_check_mark: | N/A | +| `next_agent_name` | *str* | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/agenthandoffentryobject.md b/docs/models/agenthandoffentryobject.md deleted file mode 100644 index 4bb876fb..00000000 --- a/docs/models/agenthandoffentryobject.md +++ /dev/null @@ -1,8 +0,0 @@ -# AgentHandoffEntryObject - - -## Values - -| Name | Value | -| ------- | ------- | -| `ENTRY` | entry | \ No newline at end of file diff --git a/docs/models/agenthandoffentrytype.md b/docs/models/agenthandoffentrytype.md deleted file mode 100644 index 527ebceb..00000000 --- a/docs/models/agenthandoffentrytype.md +++ /dev/null @@ -1,8 +0,0 @@ -# AgentHandoffEntryType - - -## Values - -| Name | Value | -| --------------- | --------------- | -| `AGENT_HANDOFF` | agent.handoff | \ No newline at end of file diff --git a/docs/models/agentobject.md b/docs/models/agentobject.md deleted file mode 100644 index 70e143b0..00000000 --- a/docs/models/agentobject.md +++ /dev/null @@ -1,8 +0,0 @@ -# AgentObject - - -## Values - -| Name | Value | -| ------- | ------- | -| `AGENT` | agent | \ No 
newline at end of file diff --git a/docs/models/createorupdateagentaliasrequest.md b/docs/models/agentsapiv1agentscreateorupdatealiasrequest.md similarity index 90% rename from docs/models/createorupdateagentaliasrequest.md rename to docs/models/agentsapiv1agentscreateorupdatealiasrequest.md index af2591eb..79406434 100644 --- a/docs/models/createorupdateagentaliasrequest.md +++ b/docs/models/agentsapiv1agentscreateorupdatealiasrequest.md @@ -1,4 +1,4 @@ -# CreateOrUpdateAgentAliasRequest +# AgentsAPIV1AgentsCreateOrUpdateAliasRequest ## Fields diff --git a/docs/models/deleteagentaliasrequest.md b/docs/models/agentsapiv1agentsdeletealiasrequest.md similarity index 90% rename from docs/models/deleteagentaliasrequest.md rename to docs/models/agentsapiv1agentsdeletealiasrequest.md index 17812ec4..8e95c0c3 100644 --- a/docs/models/deleteagentaliasrequest.md +++ b/docs/models/agentsapiv1agentsdeletealiasrequest.md @@ -1,4 +1,4 @@ -# DeleteAgentAliasRequest +# AgentsAPIV1AgentsDeleteAliasRequest ## Fields diff --git a/docs/models/deleteagentrequest.md b/docs/models/agentsapiv1agentsdeleterequest.md similarity index 89% rename from docs/models/deleteagentrequest.md rename to docs/models/agentsapiv1agentsdeleterequest.md index 0aaacae4..2799f418 100644 --- a/docs/models/deleteagentrequest.md +++ b/docs/models/agentsapiv1agentsdeleterequest.md @@ -1,4 +1,4 @@ -# DeleteAgentRequest +# AgentsAPIV1AgentsDeleteRequest ## Fields diff --git a/docs/models/getagentagentversion.md b/docs/models/agentsapiv1agentsgetagentversion.md similarity index 79% rename from docs/models/getagentagentversion.md rename to docs/models/agentsapiv1agentsgetagentversion.md index 6d7b3f1d..7fb9f2d5 100644 --- a/docs/models/getagentagentversion.md +++ b/docs/models/agentsapiv1agentsgetagentversion.md @@ -1,4 +1,4 @@ -# GetAgentAgentVersion +# AgentsAPIV1AgentsGetAgentVersion ## Supported Types diff --git a/docs/models/agentsapiv1agentsgetrequest.md b/docs/models/agentsapiv1agentsgetrequest.md new file 
mode 100644 index 00000000..ceffe009 --- /dev/null +++ b/docs/models/agentsapiv1agentsgetrequest.md @@ -0,0 +1,9 @@ +# AgentsAPIV1AgentsGetRequest + + +## Fields + +| Field | Type | Required | Description | +| ---------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------- | +| `agent_id` | *str* | :heavy_check_mark: | N/A | +| `agent_version` | [OptionalNullable[models.AgentsAPIV1AgentsGetAgentVersion]](../models/agentsapiv1agentsgetagentversion.md) | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/getagentversionrequest.md b/docs/models/agentsapiv1agentsgetversionrequest.md similarity index 90% rename from docs/models/getagentversionrequest.md rename to docs/models/agentsapiv1agentsgetversionrequest.md index c98fee9d..96a73589 100644 --- a/docs/models/getagentversionrequest.md +++ b/docs/models/agentsapiv1agentsgetversionrequest.md @@ -1,4 +1,4 @@ -# GetAgentVersionRequest +# AgentsAPIV1AgentsGetVersionRequest ## Fields diff --git a/docs/models/listagentsrequest.md b/docs/models/agentsapiv1agentslistrequest.md similarity index 98% rename from docs/models/listagentsrequest.md rename to docs/models/agentsapiv1agentslistrequest.md index 79aec3ea..4785a54c 100644 --- a/docs/models/listagentsrequest.md +++ b/docs/models/agentsapiv1agentslistrequest.md @@ -1,4 +1,4 @@ -# ListAgentsRequest +# AgentsAPIV1AgentsListRequest ## Fields diff --git a/docs/models/listagentaliasesrequest.md b/docs/models/agentsapiv1agentslistversionaliasesrequest.md similarity index 85% rename from docs/models/listagentaliasesrequest.md rename to 
docs/models/agentsapiv1agentslistversionaliasesrequest.md index b3570cb8..3083bf92 100644 --- a/docs/models/listagentaliasesrequest.md +++ b/docs/models/agentsapiv1agentslistversionaliasesrequest.md @@ -1,4 +1,4 @@ -# ListAgentAliasesRequest +# AgentsAPIV1AgentsListVersionAliasesRequest ## Fields diff --git a/docs/models/listagentversionsrequest.md b/docs/models/agentsapiv1agentslistversionsrequest.md similarity index 94% rename from docs/models/listagentversionsrequest.md rename to docs/models/agentsapiv1agentslistversionsrequest.md index ba8ddaa5..91831700 100644 --- a/docs/models/listagentversionsrequest.md +++ b/docs/models/agentsapiv1agentslistversionsrequest.md @@ -1,4 +1,4 @@ -# ListAgentVersionsRequest +# AgentsAPIV1AgentsListVersionsRequest ## Fields diff --git a/docs/models/agentsapiv1agentsupdaterequest.md b/docs/models/agentsapiv1agentsupdaterequest.md new file mode 100644 index 00000000..7ef60bec --- /dev/null +++ b/docs/models/agentsapiv1agentsupdaterequest.md @@ -0,0 +1,9 @@ +# AgentsAPIV1AgentsUpdateRequest + + +## Fields + +| Field | Type | Required | Description | +| ------------------------------------------------------------ | ------------------------------------------------------------ | ------------------------------------------------------------ | ------------------------------------------------------------ | +| `agent_id` | *str* | :heavy_check_mark: | N/A | +| `update_agent_request` | [models.UpdateAgentRequest](../models/updateagentrequest.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/updateagentversionrequest.md b/docs/models/agentsapiv1agentsupdateversionrequest.md similarity index 89% rename from docs/models/updateagentversionrequest.md rename to docs/models/agentsapiv1agentsupdateversionrequest.md index b83eb867..e937acc9 100644 --- a/docs/models/updateagentversionrequest.md +++ b/docs/models/agentsapiv1agentsupdateversionrequest.md @@ -1,4 +1,4 @@ -# UpdateAgentVersionRequest +# 
AgentsAPIV1AgentsUpdateVersionRequest ## Fields diff --git a/docs/models/appendconversationrequest.md b/docs/models/agentsapiv1conversationsappendrequest.md similarity index 96% rename from docs/models/appendconversationrequest.md rename to docs/models/agentsapiv1conversationsappendrequest.md index 977d8e8b..ac8a00ec 100644 --- a/docs/models/appendconversationrequest.md +++ b/docs/models/agentsapiv1conversationsappendrequest.md @@ -1,4 +1,4 @@ -# AppendConversationRequest +# AgentsAPIV1ConversationsAppendRequest ## Fields diff --git a/docs/models/appendconversationstreamrequest.md b/docs/models/agentsapiv1conversationsappendstreamrequest.md similarity index 96% rename from docs/models/appendconversationstreamrequest.md rename to docs/models/agentsapiv1conversationsappendstreamrequest.md index a23231c2..dbc330f1 100644 --- a/docs/models/appendconversationstreamrequest.md +++ b/docs/models/agentsapiv1conversationsappendstreamrequest.md @@ -1,4 +1,4 @@ -# AppendConversationStreamRequest +# AgentsAPIV1ConversationsAppendStreamRequest ## Fields diff --git a/docs/models/getconversationrequest.md b/docs/models/agentsapiv1conversationsdeleterequest.md similarity index 95% rename from docs/models/getconversationrequest.md rename to docs/models/agentsapiv1conversationsdeleterequest.md index 8a66a8b0..c6eed281 100644 --- a/docs/models/getconversationrequest.md +++ b/docs/models/agentsapiv1conversationsdeleterequest.md @@ -1,4 +1,4 @@ -# GetConversationRequest +# AgentsAPIV1ConversationsDeleteRequest ## Fields diff --git a/docs/models/deleteconversationrequest.md b/docs/models/agentsapiv1conversationsgetrequest.md similarity index 95% rename from docs/models/deleteconversationrequest.md rename to docs/models/agentsapiv1conversationsgetrequest.md index 39d9e5df..67d450c8 100644 --- a/docs/models/deleteconversationrequest.md +++ b/docs/models/agentsapiv1conversationsgetrequest.md @@ -1,4 +1,4 @@ -# DeleteConversationRequest +# AgentsAPIV1ConversationsGetRequest ## Fields diff 
--git a/docs/models/getconversationhistoryrequest.md b/docs/models/agentsapiv1conversationshistoryrequest.md similarity index 94% rename from docs/models/getconversationhistoryrequest.md rename to docs/models/agentsapiv1conversationshistoryrequest.md index fc90282b..7e5d39e9 100644 --- a/docs/models/getconversationhistoryrequest.md +++ b/docs/models/agentsapiv1conversationshistoryrequest.md @@ -1,4 +1,4 @@ -# GetConversationHistoryRequest +# AgentsAPIV1ConversationsHistoryRequest ## Fields diff --git a/docs/models/listconversationsrequest.md b/docs/models/agentsapiv1conversationslistrequest.md similarity index 92% rename from docs/models/listconversationsrequest.md rename to docs/models/agentsapiv1conversationslistrequest.md index d99b4208..62c9011f 100644 --- a/docs/models/listconversationsrequest.md +++ b/docs/models/agentsapiv1conversationslistrequest.md @@ -1,4 +1,4 @@ -# ListConversationsRequest +# AgentsAPIV1ConversationsListRequest ## Fields diff --git a/docs/models/listconversationsresponse.md b/docs/models/agentsapiv1conversationslistresponse.md similarity index 84% rename from docs/models/listconversationsresponse.md rename to docs/models/agentsapiv1conversationslistresponse.md index 9d611c55..b233ee20 100644 --- a/docs/models/listconversationsresponse.md +++ b/docs/models/agentsapiv1conversationslistresponse.md @@ -1,4 +1,4 @@ -# ListConversationsResponse +# AgentsAPIV1ConversationsListResponse ## Supported Types diff --git a/docs/models/getconversationmessagesrequest.md b/docs/models/agentsapiv1conversationsmessagesrequest.md similarity index 94% rename from docs/models/getconversationmessagesrequest.md rename to docs/models/agentsapiv1conversationsmessagesrequest.md index fd037fea..a91ab046 100644 --- a/docs/models/getconversationmessagesrequest.md +++ b/docs/models/agentsapiv1conversationsmessagesrequest.md @@ -1,4 +1,4 @@ -# GetConversationMessagesRequest +# AgentsAPIV1ConversationsMessagesRequest ## Fields diff --git 
a/docs/models/restartconversationrequest.md b/docs/models/agentsapiv1conversationsrestartrequest.md similarity index 96% rename from docs/models/restartconversationrequest.md rename to docs/models/agentsapiv1conversationsrestartrequest.md index f24f14e6..a18a41f5 100644 --- a/docs/models/restartconversationrequest.md +++ b/docs/models/agentsapiv1conversationsrestartrequest.md @@ -1,4 +1,4 @@ -# RestartConversationRequest +# AgentsAPIV1ConversationsRestartRequest ## Fields diff --git a/docs/models/restartconversationstreamrequest.md b/docs/models/agentsapiv1conversationsrestartstreamrequest.md similarity index 96% rename from docs/models/restartconversationstreamrequest.md rename to docs/models/agentsapiv1conversationsrestartstreamrequest.md index daa661a9..7548286a 100644 --- a/docs/models/restartconversationstreamrequest.md +++ b/docs/models/agentsapiv1conversationsrestartstreamrequest.md @@ -1,4 +1,4 @@ -# RestartConversationStreamRequest +# AgentsAPIV1ConversationsRestartStreamRequest ## Fields diff --git a/docs/models/agentscompletionrequest.md b/docs/models/agentscompletionrequest.md index d87dc7da..33435732 100644 --- a/docs/models/agentscompletionrequest.md +++ b/docs/models/agentscompletionrequest.md @@ -11,7 +11,7 @@ | `random_seed` | *OptionalNullable[int]* | :heavy_minus_sign: | The seed to use for random sampling. If set, different calls will generate deterministic results. | | | `metadata` | Dict[str, *Any*] | :heavy_minus_sign: | N/A | | | `messages` | List[[models.AgentsCompletionRequestMessage](../models/agentscompletionrequestmessage.md)] | :heavy_check_mark: | The prompt(s) to generate completions for, encoded as a list of dict with role and content. | [
{
"role": "user",
"content": "Who is the best French painter? Answer in one short sentence."
}
] | -| `response_format` | [Optional[models.ResponseFormat]](../models/responseformat.md) | :heavy_minus_sign: | Specify the format that the model must output. By default it will use `{ "type": "text" }`. Setting to `{ "type": "json_object" }` enables JSON mode, which guarantees the message the model generates is in JSON. When using JSON mode you MUST also instruct the model to produce JSON yourself with a system or a user message. Setting to `{ "type": "json_schema" }` enables JSON schema mode, which guarantees the message the model generates is in JSON and follows the schema you provide. | {
"type": "text"
} | +| `response_format` | [Optional[models.ResponseFormat]](../models/responseformat.md) | :heavy_minus_sign: | Specify the format that the model must output. By default it will use `{ "type": "text" }`. Setting to `{ "type": "json_object" }` enables JSON mode, which guarantees the message the model generates is in JSON. When using JSON mode you MUST also instruct the model to produce JSON yourself with a system or a user message. Setting to `{ "type": "json_schema" }` enables JSON schema mode, which guarantees the message the model generates is in JSON and follows the schema you provide. | **Example 1:** {
"type": "text"
}
**Example 2:** {
"type": "json_object"
}
**Example 3:** {
"type": "json_schema",
"json_schema": {
"schema": {
"properties": {
"name": {
"title": "Name",
"type": "string"
},
"authors": {
"items": {
"type": "string"
},
"title": "Authors",
"type": "array"
}
},
"required": [
"name",
"authors"
],
"title": "Book",
"type": "object",
"additionalProperties": false
},
"name": "book",
"strict": true
}
} | | `tools` | List[[models.Tool](../models/tool.md)] | :heavy_minus_sign: | N/A | | | `tool_choice` | [Optional[models.AgentsCompletionRequestToolChoice]](../models/agentscompletionrequesttoolchoice.md) | :heavy_minus_sign: | N/A | | | `presence_penalty` | *Optional[float]* | :heavy_minus_sign: | The `presence_penalty` determines how much the model penalizes the repetition of words or phrases. A higher presence penalty encourages the model to use a wider variety of words and phrases, making the output more diverse and creative. | | diff --git a/docs/models/agentscompletionstreamrequest.md b/docs/models/agentscompletionstreamrequest.md index dd1804a1..407be8e0 100644 --- a/docs/models/agentscompletionstreamrequest.md +++ b/docs/models/agentscompletionstreamrequest.md @@ -11,7 +11,7 @@ | `random_seed` | *OptionalNullable[int]* | :heavy_minus_sign: | The seed to use for random sampling. If set, different calls will generate deterministic results. | | | `metadata` | Dict[str, *Any*] | :heavy_minus_sign: | N/A | | | `messages` | List[[models.AgentsCompletionStreamRequestMessage](../models/agentscompletionstreamrequestmessage.md)] | :heavy_check_mark: | The prompt(s) to generate completions for, encoded as a list of dict with role and content. | [
{
"role": "user",
"content": "Who is the best French painter? Answer in one short sentence."
}
] | -| `response_format` | [Optional[models.ResponseFormat]](../models/responseformat.md) | :heavy_minus_sign: | Specify the format that the model must output. By default it will use `{ "type": "text" }`. Setting to `{ "type": "json_object" }` enables JSON mode, which guarantees the message the model generates is in JSON. When using JSON mode you MUST also instruct the model to produce JSON yourself with a system or a user message. Setting to `{ "type": "json_schema" }` enables JSON schema mode, which guarantees the message the model generates is in JSON and follows the schema you provide. | {
"type": "text"
} | +| `response_format` | [Optional[models.ResponseFormat]](../models/responseformat.md) | :heavy_minus_sign: | Specify the format that the model must output. By default it will use `{ "type": "text" }`. Setting to `{ "type": "json_object" }` enables JSON mode, which guarantees the message the model generates is in JSON. When using JSON mode you MUST also instruct the model to produce JSON yourself with a system or a user message. Setting to `{ "type": "json_schema" }` enables JSON schema mode, which guarantees the message the model generates is in JSON and follows the schema you provide. | **Example 1:** {
"type": "text"
}
**Example 2:** {
"type": "json_object"
}
**Example 3:** {
"type": "json_schema",
"json_schema": {
"schema": {
"properties": {
"name": {
"title": "Name",
"type": "string"
},
"authors": {
"items": {
"type": "string"
},
"title": "Authors",
"type": "array"
}
},
"required": [
"name",
"authors"
],
"title": "Book",
"type": "object",
"additionalProperties": false
},
"name": "book",
"strict": true
}
} | | `tools` | List[[models.Tool](../models/tool.md)] | :heavy_minus_sign: | N/A | | | `tool_choice` | [Optional[models.AgentsCompletionStreamRequestToolChoice]](../models/agentscompletionstreamrequesttoolchoice.md) | :heavy_minus_sign: | N/A | | | `presence_penalty` | *Optional[float]* | :heavy_minus_sign: | The `presence_penalty` determines how much the model penalizes the repetition of words or phrases. A higher presence penalty encourages the model to use a wider variety of words and phrases, making the output more diverse and creative. | | diff --git a/docs/models/unarchiveftmodelout.md b/docs/models/archivemodelresponse.md similarity index 96% rename from docs/models/unarchiveftmodelout.md rename to docs/models/archivemodelresponse.md index 12c3d745..276656d1 100644 --- a/docs/models/unarchiveftmodelout.md +++ b/docs/models/archivemodelresponse.md @@ -1,4 +1,4 @@ -# UnarchiveFTModelOut +# ArchiveModelResponse ## Fields diff --git a/docs/models/assistantmessage.md b/docs/models/assistantmessage.md index 3d0bd90b..9ef63837 100644 --- a/docs/models/assistantmessage.md +++ b/docs/models/assistantmessage.md @@ -5,7 +5,7 @@ | Field | Type | Required | Description | | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | +| 
`role` | *Optional[Literal["assistant"]]* | :heavy_minus_sign: | N/A | | `content` | [OptionalNullable[models.AssistantMessageContent]](../models/assistantmessagecontent.md) | :heavy_minus_sign: | N/A | | `tool_calls` | List[[models.ToolCall](../models/toolcall.md)] | :heavy_minus_sign: | N/A | -| `prefix` | *Optional[bool]* | :heavy_minus_sign: | Set this to `true` when adding an assistant message as prefix to condition the model response. The role of the prefix message is to force the model to start its answer by the content of the message. | -| `role` | [Optional[models.AssistantMessageRole]](../models/assistantmessagerole.md) | :heavy_minus_sign: | N/A | \ No newline at end of file +| `prefix` | *Optional[bool]* | :heavy_minus_sign: | Set this to `true` when adding an assistant message as prefix to condition the model response. The role of the prefix message is to force the model to start its answer by the content of the message. | \ No newline at end of file diff --git a/docs/models/assistantmessagerole.md b/docs/models/assistantmessagerole.md deleted file mode 100644 index 658229e7..00000000 --- a/docs/models/assistantmessagerole.md +++ /dev/null @@ -1,8 +0,0 @@ -# AssistantMessageRole - - -## Values - -| Name | Value | -| ----------- | ----------- | -| `ASSISTANT` | assistant | \ No newline at end of file diff --git a/docs/models/audiochunk.md b/docs/models/audiochunk.md index 8a04af04..1ba8b0f5 100644 --- a/docs/models/audiochunk.md +++ b/docs/models/audiochunk.md @@ -5,5 +5,5 @@ | Field | Type | Required | Description | | ------------------------ | ------------------------ | ------------------------ | ------------------------ | -| `input_audio` | *str* | :heavy_check_mark: | N/A | -| `type` | *Literal["input_audio"]* | :heavy_check_mark: | N/A | \ No newline at end of file +| `type` | *Literal["input_audio"]* | :heavy_check_mark: | N/A | +| `input_audio` | *str* | :heavy_check_mark: | N/A | \ No newline at end of file diff --git 
a/docs/models/audiotranscriptionrequest.md b/docs/models/audiotranscriptionrequest.md index d7f5bd51..80bd5301 100644 --- a/docs/models/audiotranscriptionrequest.md +++ b/docs/models/audiotranscriptionrequest.md @@ -5,7 +5,7 @@ | Field | Type | Required | Description | Example | | ---------------------------------------------------------------------------- | ---------------------------------------------------------------------------- | ---------------------------------------------------------------------------- | ---------------------------------------------------------------------------- | ---------------------------------------------------------------------------- | -| `model` | *str* | :heavy_check_mark: | ID of the model to be used. | voxtral-mini-latest | +| `model` | *str* | :heavy_check_mark: | ID of the model to be used. | **Example 1:** voxtral-mini-latest
**Example 2:** voxtral-mini-2507 | | `file` | [Optional[models.File]](../models/file.md) | :heavy_minus_sign: | N/A | | | `file_url` | *OptionalNullable[str]* | :heavy_minus_sign: | Url of a file to be transcribed | | | `file_id` | *OptionalNullable[str]* | :heavy_minus_sign: | ID of a file uploaded to /v1/files | | diff --git a/docs/models/batchjobout.md b/docs/models/batchjob.md similarity index 99% rename from docs/models/batchjobout.md rename to docs/models/batchjob.md index 5f101173..162e2cff 100644 --- a/docs/models/batchjobout.md +++ b/docs/models/batchjob.md @@ -1,4 +1,4 @@ -# BatchJobOut +# BatchJob ## Fields diff --git a/docs/models/batchjobsout.md b/docs/models/batchjobsout.md deleted file mode 100644 index 7a9d6f68..00000000 --- a/docs/models/batchjobsout.md +++ /dev/null @@ -1,10 +0,0 @@ -# BatchJobsOut - - -## Fields - -| Field | Type | Required | Description | -| ---------------------------------------------------- | ---------------------------------------------------- | ---------------------------------------------------- | ---------------------------------------------------- | -| `data` | List[[models.BatchJobOut](../models/batchjobout.md)] | :heavy_minus_sign: | N/A | -| `object` | *Optional[Literal["list"]]* | :heavy_minus_sign: | N/A | -| `total` | *int* | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/cancelfinetuningjobresponse.md b/docs/models/cancelfinetuningjobresponse.md deleted file mode 100644 index c512342e..00000000 --- a/docs/models/cancelfinetuningjobresponse.md +++ /dev/null @@ -1,19 +0,0 @@ -# CancelFineTuningJobResponse - -OK - - -## Supported Types - -### `models.ClassifierDetailedJobOut` - -```python -value: models.ClassifierDetailedJobOut = /* values here */ -``` - -### `models.CompletionDetailedJobOut` - -```python -value: models.CompletionDetailedJobOut = /* values here */ -``` - diff --git a/docs/models/chatclassificationrequest.md b/docs/models/chatclassificationrequest.md index 
910d62ae..ba9c95ea 100644 --- a/docs/models/chatclassificationrequest.md +++ b/docs/models/chatclassificationrequest.md @@ -6,4 +6,4 @@ | Field | Type | Required | Description | | ------------------------------------ | ------------------------------------ | ------------------------------------ | ------------------------------------ | | `model` | *str* | :heavy_check_mark: | N/A | -| `inputs` | [models.Inputs](../models/inputs.md) | :heavy_check_mark: | Chat to classify | \ No newline at end of file +| `input` | [models.Inputs](../models/inputs.md) | :heavy_check_mark: | Chat to classify | \ No newline at end of file diff --git a/docs/models/chatcompletionrequest.md b/docs/models/chatcompletionrequest.md index f3abeeff..921161fa 100644 --- a/docs/models/chatcompletionrequest.md +++ b/docs/models/chatcompletionrequest.md @@ -14,7 +14,7 @@ | `random_seed` | *OptionalNullable[int]* | :heavy_minus_sign: | The seed to use for random sampling. If set, different calls will generate deterministic results. | | | `metadata` | Dict[str, *Any*] | :heavy_minus_sign: | N/A | | | `messages` | List[[models.ChatCompletionRequestMessage](../models/chatcompletionrequestmessage.md)] | :heavy_check_mark: | The prompt(s) to generate completions for, encoded as a list of dict with role and content. | [
{
"role": "user",
"content": "Who is the best French painter? Answer in one short sentence."
}
] | -| `response_format` | [Optional[models.ResponseFormat]](../models/responseformat.md) | :heavy_minus_sign: | Specify the format that the model must output. By default it will use `{ "type": "text" }`. Setting to `{ "type": "json_object" }` enables JSON mode, which guarantees the message the model generates is in JSON. When using JSON mode you MUST also instruct the model to produce JSON yourself with a system or a user message. Setting to `{ "type": "json_schema" }` enables JSON schema mode, which guarantees the message the model generates is in JSON and follows the schema you provide. | {
"type": "text"
} | +| `response_format` | [Optional[models.ResponseFormat]](../models/responseformat.md) | :heavy_minus_sign: | Specify the format that the model must output. By default it will use `{ "type": "text" }`. Setting to `{ "type": "json_object" }` enables JSON mode, which guarantees the message the model generates is in JSON. When using JSON mode you MUST also instruct the model to produce JSON yourself with a system or a user message. Setting to `{ "type": "json_schema" }` enables JSON schema mode, which guarantees the message the model generates is in JSON and follows the schema you provide. | **Example 1:** {
"type": "text"
}
**Example 2:** {
"type": "json_object"
}
**Example 3:** {
"type": "json_schema",
"json_schema": {
"schema": {
"properties": {
"name": {
"title": "Name",
"type": "string"
},
"authors": {
"items": {
"type": "string"
},
"title": "Authors",
"type": "array"
}
},
"required": [
"name",
"authors"
],
"title": "Book",
"type": "object",
"additionalProperties": false
},
"name": "book",
"strict": true
}
} | | `tools` | List[[models.Tool](../models/tool.md)] | :heavy_minus_sign: | A list of tools the model may call. Use this to provide a list of functions the model may generate JSON inputs for. | | | `tool_choice` | [Optional[models.ChatCompletionRequestToolChoice]](../models/chatcompletionrequesttoolchoice.md) | :heavy_minus_sign: | Controls which (if any) tool is called by the model. `none` means the model will not call any tool and instead generates a message. `auto` means the model can pick between generating a message or calling one or more tools. `any` or `required` means the model must call one or more tools. Specifying a particular tool via `{"type": "function", "function": {"name": "my_function"}}` forces the model to call that tool. | | | `presence_penalty` | *Optional[float]* | :heavy_minus_sign: | The `presence_penalty` determines how much the model penalizes the repetition of words or phrases. A higher presence penalty encourages the model to use a wider variety of words and phrases, making the output more diverse and creative. | | diff --git a/docs/models/chatcompletionstreamrequest.md b/docs/models/chatcompletionstreamrequest.md index 42792d39..8761f000 100644 --- a/docs/models/chatcompletionstreamrequest.md +++ b/docs/models/chatcompletionstreamrequest.md @@ -14,7 +14,7 @@ | `random_seed` | *OptionalNullable[int]* | :heavy_minus_sign: | The seed to use for random sampling. If set, different calls will generate deterministic results. | | | `metadata` | Dict[str, *Any*] | :heavy_minus_sign: | N/A | | | `messages` | List[[models.ChatCompletionStreamRequestMessage](../models/chatcompletionstreamrequestmessage.md)] | :heavy_check_mark: | The prompt(s) to generate completions for, encoded as a list of dict with role and content. | [
{
"role": "user",
"content": "Who is the best French painter? Answer in one short sentence."
}
] | -| `response_format` | [Optional[models.ResponseFormat]](../models/responseformat.md) | :heavy_minus_sign: | Specify the format that the model must output. By default it will use `{ "type": "text" }`. Setting to `{ "type": "json_object" }` enables JSON mode, which guarantees the message the model generates is in JSON. When using JSON mode you MUST also instruct the model to produce JSON yourself with a system or a user message. Setting to `{ "type": "json_schema" }` enables JSON schema mode, which guarantees the message the model generates is in JSON and follows the schema you provide. | {
"type": "text"
} | +| `response_format` | [Optional[models.ResponseFormat]](../models/responseformat.md) | :heavy_minus_sign: | Specify the format that the model must output. By default it will use `{ "type": "text" }`. Setting to `{ "type": "json_object" }` enables JSON mode, which guarantees the message the model generates is in JSON. When using JSON mode you MUST also instruct the model to produce JSON yourself with a system or a user message. Setting to `{ "type": "json_schema" }` enables JSON schema mode, which guarantees the message the model generates is in JSON and follows the schema you provide. | **Example 1:** {
"type": "text"
}
**Example 2:** {
"type": "json_object"
}
**Example 3:** {
"type": "json_schema",
"json_schema": {
"schema": {
"properties": {
"name": {
"title": "Name",
"type": "string"
},
"authors": {
"items": {
"type": "string"
},
"title": "Authors",
"type": "array"
}
},
"required": [
"name",
"authors"
],
"title": "Book",
"type": "object",
"additionalProperties": false
},
"name": "book",
"strict": true
}
} | | `tools` | List[[models.Tool](../models/tool.md)] | :heavy_minus_sign: | A list of tools the model may call. Use this to provide a list of functions the model may generate JSON inputs for. | | | `tool_choice` | [Optional[models.ChatCompletionStreamRequestToolChoice]](../models/chatcompletionstreamrequesttoolchoice.md) | :heavy_minus_sign: | Controls which (if any) tool is called by the model. `none` means the model will not call any tool and instead generates a message. `auto` means the model can pick between generating a message or calling one or more tools. `any` or `required` means the model must call one or more tools. Specifying a particular tool via `{"type": "function", "function": {"name": "my_function"}}` forces the model to call that tool. | | | `presence_penalty` | *Optional[float]* | :heavy_minus_sign: | The `presence_penalty` determines how much the model penalizes the repetition of words or phrases. A higher presence penalty encourages the model to use a wider variety of words and phrases, making the output more diverse and creative. 
| | diff --git a/docs/models/checkpointout.md b/docs/models/checkpoint.md similarity index 96% rename from docs/models/checkpointout.md rename to docs/models/checkpoint.md index 053592d2..f7f35530 100644 --- a/docs/models/checkpointout.md +++ b/docs/models/checkpoint.md @@ -1,10 +1,10 @@ -# CheckpointOut +# Checkpoint ## Fields | Field | Type | Required | Description | Example | | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| `metrics` | [models.MetricOut](../models/metricout.md) | :heavy_check_mark: | Metrics at the step number during the fine-tuning job. Use these metrics to assess if the training is going smoothly (loss should decrease, token accuracy should increase). | | +| `metrics` | [models.Metric](../models/metric.md) | :heavy_check_mark: | Metrics at the step number during the fine-tuning job. Use these metrics to assess if the training is going smoothly (loss should decrease, token accuracy should increase). | | | `step_number` | *int* | :heavy_check_mark: | The step number that the checkpoint was created at. | | | `created_at` | *int* | :heavy_check_mark: | The UNIX timestamp (in seconds) for when the checkpoint was created. 
| 1716963433 | \ No newline at end of file diff --git a/docs/models/classifierdetailedjoboutintegration.md b/docs/models/classifierdetailedjoboutintegration.md deleted file mode 100644 index 9dfa6e8a..00000000 --- a/docs/models/classifierdetailedjoboutintegration.md +++ /dev/null @@ -1,11 +0,0 @@ -# ClassifierDetailedJobOutIntegration - - -## Supported Types - -### `models.WandbIntegrationOut` - -```python -value: models.WandbIntegrationOut = /* values here */ -``` - diff --git a/docs/models/classifierfinetunedmodel.md b/docs/models/classifierfinetunedmodel.md new file mode 100644 index 00000000..ad05f931 --- /dev/null +++ b/docs/models/classifierfinetunedmodel.md @@ -0,0 +1,23 @@ +# ClassifierFineTunedModel + + +## Fields + +| Field | Type | Required | Description | +| ---------------------------------------------------------------------------- | ---------------------------------------------------------------------------- | ---------------------------------------------------------------------------- | ---------------------------------------------------------------------------- | +| `id` | *str* | :heavy_check_mark: | N/A | +| `object` | *Optional[Literal["model"]]* | :heavy_minus_sign: | N/A | +| `created` | *int* | :heavy_check_mark: | N/A | +| `owned_by` | *str* | :heavy_check_mark: | N/A | +| `workspace_id` | *str* | :heavy_check_mark: | N/A | +| `root` | *str* | :heavy_check_mark: | N/A | +| `root_version` | *str* | :heavy_check_mark: | N/A | +| `archived` | *bool* | :heavy_check_mark: | N/A | +| `name` | *OptionalNullable[str]* | :heavy_minus_sign: | N/A | +| `description` | *OptionalNullable[str]* | :heavy_minus_sign: | N/A | +| `capabilities` | [models.FineTunedModelCapabilities](../models/finetunedmodelcapabilities.md) | :heavy_check_mark: | N/A | +| `max_context_length` | *Optional[int]* | :heavy_minus_sign: | N/A | +| `aliases` | List[*str*] | :heavy_minus_sign: | N/A | +| `job` | *str* | :heavy_check_mark: | N/A | +| `classifier_targets` | 
List[[models.ClassifierTargetResult](../models/classifiertargetresult.md)] | :heavy_check_mark: | N/A | +| `model_type` | *Literal["classifier"]* | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/classifierjobout.md b/docs/models/classifierfinetuningjob.md similarity index 97% rename from docs/models/classifierjobout.md rename to docs/models/classifierfinetuningjob.md index ceecef5d..369756ba 100644 --- a/docs/models/classifierjobout.md +++ b/docs/models/classifierfinetuningjob.md @@ -1,4 +1,4 @@ -# ClassifierJobOut +# ClassifierFineTuningJob ## Fields @@ -8,7 +8,7 @@ | `id` | *str* | :heavy_check_mark: | The ID of the job. | | `auto_start` | *bool* | :heavy_check_mark: | N/A | | `model` | *str* | :heavy_check_mark: | N/A | -| `status` | [models.ClassifierJobOutStatus](../models/classifierjoboutstatus.md) | :heavy_check_mark: | The current status of the fine-tuning job. | +| `status` | [models.ClassifierFineTuningJobStatus](../models/classifierfinetuningjobstatus.md) | :heavy_check_mark: | The current status of the fine-tuning job. | | `created_at` | *int* | :heavy_check_mark: | The UNIX timestamp (in seconds) for when the fine-tuning job was created. | | `modified_at` | *int* | :heavy_check_mark: | The UNIX timestamp (in seconds) for when the fine-tuning job was last modified. | | `training_files` | List[*str*] | :heavy_check_mark: | A list containing the IDs of uploaded files that contain training data. | @@ -16,8 +16,8 @@ | `object` | *Optional[Literal["job"]]* | :heavy_minus_sign: | The object type of the fine-tuning job. | | `fine_tuned_model` | *OptionalNullable[str]* | :heavy_minus_sign: | The name of the fine-tuned model that is being created. The value will be `null` if the fine-tuning job is still running. | | `suffix` | *OptionalNullable[str]* | :heavy_minus_sign: | Optional text/code that adds more context for the model. When given a `prompt` and a `suffix` the model will fill what is between them. 
When `suffix` is not provided, the model will simply execute completion starting with `prompt`. | -| `integrations` | List[[models.ClassifierJobOutIntegration](../models/classifierjoboutintegration.md)] | :heavy_minus_sign: | A list of integrations enabled for your fine-tuning job. | +| `integrations` | List[[models.ClassifierFineTuningJobIntegration](../models/classifierfinetuningjobintegration.md)] | :heavy_minus_sign: | A list of integrations enabled for your fine-tuning job. | | `trained_tokens` | *OptionalNullable[int]* | :heavy_minus_sign: | Total number of tokens trained. | -| `metadata` | [OptionalNullable[models.JobMetadataOut]](../models/jobmetadataout.md) | :heavy_minus_sign: | N/A | +| `metadata` | [OptionalNullable[models.JobMetadata]](../models/jobmetadata.md) | :heavy_minus_sign: | N/A | | `job_type` | *Literal["classifier"]* | :heavy_check_mark: | The type of job (`FT` for fine-tuning). | | `hyperparameters` | [models.ClassifierTrainingParameters](../models/classifiertrainingparameters.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/classifierdetailedjobout.md b/docs/models/classifierfinetuningjobdetails.md similarity index 94% rename from docs/models/classifierdetailedjobout.md rename to docs/models/classifierfinetuningjobdetails.md index fb532449..c5efdf1c 100644 --- a/docs/models/classifierdetailedjobout.md +++ b/docs/models/classifierfinetuningjobdetails.md @@ -1,4 +1,4 @@ -# ClassifierDetailedJobOut +# ClassifierFineTuningJobDetails ## Fields @@ -8,7 +8,7 @@ | `id` | *str* | :heavy_check_mark: | N/A | | `auto_start` | *bool* | :heavy_check_mark: | N/A | | `model` | *str* | :heavy_check_mark: | N/A | -| `status` | [models.ClassifierDetailedJobOutStatus](../models/classifierdetailedjoboutstatus.md) | :heavy_check_mark: | N/A | +| `status` | [models.ClassifierFineTuningJobDetailsStatus](../models/classifierfinetuningjobdetailsstatus.md) | :heavy_check_mark: | N/A | | `created_at` | *int* | :heavy_check_mark: | 
N/A | | `modified_at` | *int* | :heavy_check_mark: | N/A | | `training_files` | List[*str*] | :heavy_check_mark: | N/A | @@ -16,11 +16,11 @@ | `object` | *Optional[Literal["job"]]* | :heavy_minus_sign: | N/A | | `fine_tuned_model` | *OptionalNullable[str]* | :heavy_minus_sign: | N/A | | `suffix` | *OptionalNullable[str]* | :heavy_minus_sign: | N/A | -| `integrations` | List[[models.ClassifierDetailedJobOutIntegration](../models/classifierdetailedjoboutintegration.md)] | :heavy_minus_sign: | N/A | +| `integrations` | List[[models.ClassifierFineTuningJobDetailsIntegration](../models/classifierfinetuningjobdetailsintegration.md)] | :heavy_minus_sign: | N/A | | `trained_tokens` | *OptionalNullable[int]* | :heavy_minus_sign: | N/A | -| `metadata` | [OptionalNullable[models.JobMetadataOut]](../models/jobmetadataout.md) | :heavy_minus_sign: | N/A | +| `metadata` | [OptionalNullable[models.JobMetadata]](../models/jobmetadata.md) | :heavy_minus_sign: | N/A | | `job_type` | *Literal["classifier"]* | :heavy_check_mark: | N/A | | `hyperparameters` | [models.ClassifierTrainingParameters](../models/classifiertrainingparameters.md) | :heavy_check_mark: | N/A | -| `events` | List[[models.EventOut](../models/eventout.md)] | :heavy_minus_sign: | Event items are created every time the status of a fine-tuning job changes. The timestamped list of all events is accessible here. | -| `checkpoints` | List[[models.CheckpointOut](../models/checkpointout.md)] | :heavy_minus_sign: | N/A | -| `classifier_targets` | List[[models.ClassifierTargetOut](../models/classifiertargetout.md)] | :heavy_check_mark: | N/A | \ No newline at end of file +| `events` | List[[models.Event](../models/event.md)] | :heavy_minus_sign: | Event items are created every time the status of a fine-tuning job changes. The timestamped list of all events is accessible here. 
| +| `checkpoints` | List[[models.Checkpoint](../models/checkpoint.md)] | :heavy_minus_sign: | N/A | +| `classifier_targets` | List[[models.ClassifierTargetResult](../models/classifiertargetresult.md)] | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/classifierfinetuningjobdetailsintegration.md b/docs/models/classifierfinetuningjobdetailsintegration.md new file mode 100644 index 00000000..438a35d9 --- /dev/null +++ b/docs/models/classifierfinetuningjobdetailsintegration.md @@ -0,0 +1,11 @@ +# ClassifierFineTuningJobDetailsIntegration + + +## Supported Types + +### `models.WandbIntegrationResult` + +```python +value: models.WandbIntegrationResult = /* values here */ +``` + diff --git a/docs/models/classifierdetailedjoboutstatus.md b/docs/models/classifierfinetuningjobdetailsstatus.md similarity index 94% rename from docs/models/classifierdetailedjoboutstatus.md rename to docs/models/classifierfinetuningjobdetailsstatus.md index c3118aaf..058c6583 100644 --- a/docs/models/classifierdetailedjoboutstatus.md +++ b/docs/models/classifierfinetuningjobdetailsstatus.md @@ -1,4 +1,4 @@ -# ClassifierDetailedJobOutStatus +# ClassifierFineTuningJobDetailsStatus ## Values diff --git a/docs/models/classifierfinetuningjobintegration.md b/docs/models/classifierfinetuningjobintegration.md new file mode 100644 index 00000000..820aee4c --- /dev/null +++ b/docs/models/classifierfinetuningjobintegration.md @@ -0,0 +1,11 @@ +# ClassifierFineTuningJobIntegration + + +## Supported Types + +### `models.WandbIntegrationResult` + +```python +value: models.WandbIntegrationResult = /* values here */ +``` + diff --git a/docs/models/completionjoboutstatus.md b/docs/models/classifierfinetuningjobstatus.md similarity index 95% rename from docs/models/completionjoboutstatus.md rename to docs/models/classifierfinetuningjobstatus.md index 91754945..ca829885 100644 --- a/docs/models/completionjoboutstatus.md +++ b/docs/models/classifierfinetuningjobstatus.md @@ -1,4 
+1,4 @@ -# CompletionJobOutStatus +# ClassifierFineTuningJobStatus The current status of the fine-tuning job. diff --git a/docs/models/classifierftmodelout.md b/docs/models/classifierftmodelout.md deleted file mode 100644 index 6e7afbbe..00000000 --- a/docs/models/classifierftmodelout.md +++ /dev/null @@ -1,23 +0,0 @@ -# ClassifierFTModelOut - - -## Fields - -| Field | Type | Required | Description | -| -------------------------------------------------------------------- | -------------------------------------------------------------------- | -------------------------------------------------------------------- | -------------------------------------------------------------------- | -| `id` | *str* | :heavy_check_mark: | N/A | -| `object` | *Optional[Literal["model"]]* | :heavy_minus_sign: | N/A | -| `created` | *int* | :heavy_check_mark: | N/A | -| `owned_by` | *str* | :heavy_check_mark: | N/A | -| `workspace_id` | *str* | :heavy_check_mark: | N/A | -| `root` | *str* | :heavy_check_mark: | N/A | -| `root_version` | *str* | :heavy_check_mark: | N/A | -| `archived` | *bool* | :heavy_check_mark: | N/A | -| `name` | *OptionalNullable[str]* | :heavy_minus_sign: | N/A | -| `description` | *OptionalNullable[str]* | :heavy_minus_sign: | N/A | -| `capabilities` | [models.FTModelCapabilitiesOut](../models/ftmodelcapabilitiesout.md) | :heavy_check_mark: | N/A | -| `max_context_length` | *Optional[int]* | :heavy_minus_sign: | N/A | -| `aliases` | List[*str*] | :heavy_minus_sign: | N/A | -| `job` | *str* | :heavy_check_mark: | N/A | -| `classifier_targets` | List[[models.ClassifierTargetOut](../models/classifiertargetout.md)] | :heavy_check_mark: | N/A | -| `model_type` | *Literal["classifier"]* | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/classifierjoboutintegration.md b/docs/models/classifierjoboutintegration.md deleted file mode 100644 index 33af8a70..00000000 --- a/docs/models/classifierjoboutintegration.md +++ /dev/null @@ -1,11 +0,0 @@ 
-# ClassifierJobOutIntegration - - -## Supported Types - -### `models.WandbIntegrationOut` - -```python -value: models.WandbIntegrationOut = /* values here */ -``` - diff --git a/docs/models/classifiertargetin.md b/docs/models/classifiertarget.md similarity index 99% rename from docs/models/classifiertargetin.md rename to docs/models/classifiertarget.md index 78cab67b..f8c99e2e 100644 --- a/docs/models/classifiertargetin.md +++ b/docs/models/classifiertarget.md @@ -1,4 +1,4 @@ -# ClassifierTargetIn +# ClassifierTarget ## Fields diff --git a/docs/models/classifiertargetout.md b/docs/models/classifiertargetresult.md similarity index 98% rename from docs/models/classifiertargetout.md rename to docs/models/classifiertargetresult.md index 57535ae5..ccadc623 100644 --- a/docs/models/classifiertargetout.md +++ b/docs/models/classifiertargetresult.md @@ -1,4 +1,4 @@ -# ClassifierTargetOut +# ClassifierTargetResult ## Fields diff --git a/docs/models/classifiertrainingparametersin.md b/docs/models/classifiertrainingparametersin.md deleted file mode 100644 index 1287c973..00000000 --- a/docs/models/classifiertrainingparametersin.md +++ /dev/null @@ -1,15 +0,0 @@ -# ClassifierTrainingParametersIn - -The fine-tuning hyperparameter settings used in a classifier fine-tune job. 
- - -## Fields - -| Field | Type | Required | Description | -| -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| `training_steps` | *OptionalNullable[int]* | :heavy_minus_sign: | The number of training steps to perform. A training step refers to a single update of the model weights during the fine-tuning process. This update is typically calculated using a batch of samples from the training dataset. 
| -| `learning_rate` | *Optional[float]* | :heavy_minus_sign: | A parameter describing how much to adjust the pre-trained model's weights in response to the estimated error each time the weights are updated during the fine-tuning process. | -| `weight_decay` | *OptionalNullable[float]* | :heavy_minus_sign: | (Advanced Usage) Weight decay adds a term to the loss function that is proportional to the sum of the squared weights. This term reduces the magnitude of the weights and prevents them from growing too large. | -| `warmup_fraction` | *OptionalNullable[float]* | :heavy_minus_sign: | (Advanced Usage) A parameter that specifies the percentage of the total training steps at which the learning rate warm-up phase ends. During this phase, the learning rate gradually increases from a small value to the initial learning rate, helping to stabilize the training process and improve convergence. Similar to `pct_start` in [mistral-finetune](https://github.com/mistralai/mistral-finetune) | -| `epochs` | *OptionalNullable[float]* | :heavy_minus_sign: | N/A | -| `seq_len` | *OptionalNullable[int]* | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/codeinterpretertool.md b/docs/models/codeinterpretertool.md index 544cda93..6302fc62 100644 --- a/docs/models/codeinterpretertool.md +++ b/docs/models/codeinterpretertool.md @@ -3,6 +3,7 @@ ## Fields -| Field | Type | Required | Description | -| ----------------------------- | ----------------------------- | ----------------------------- | ----------------------------- | -| `type` | *Literal["code_interpreter"]* | :heavy_check_mark: | N/A | \ No newline at end of file +| Field | Type | Required | Description | +| ---------------------------------------------------------------------------- | ---------------------------------------------------------------------------- | ---------------------------------------------------------------------------- | 
---------------------------------------------------------------------------- | +| `tool_configuration` | [OptionalNullable[models.ToolConfiguration]](../models/toolconfiguration.md) | :heavy_minus_sign: | N/A | +| `type` | *Literal["code_interpreter"]* | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/completionargs.md b/docs/models/completionargs.md index 60d09137..148f7608 100644 --- a/docs/models/completionargs.md +++ b/docs/models/completionargs.md @@ -5,15 +5,15 @@ White-listed arguments from the completion API ## Fields -| Field | Type | Required | Description | Example | -| ------------------------------------------------------------------------------ | ------------------------------------------------------------------------------ | ------------------------------------------------------------------------------ | ------------------------------------------------------------------------------ | ------------------------------------------------------------------------------ | -| `stop` | [OptionalNullable[models.CompletionArgsStop]](../models/completionargsstop.md) | :heavy_minus_sign: | N/A | | -| `presence_penalty` | *OptionalNullable[float]* | :heavy_minus_sign: | N/A | | -| `frequency_penalty` | *OptionalNullable[float]* | :heavy_minus_sign: | N/A | | -| `temperature` | *OptionalNullable[float]* | :heavy_minus_sign: | N/A | | -| `top_p` | *OptionalNullable[float]* | :heavy_minus_sign: | N/A | | -| `max_tokens` | *OptionalNullable[int]* | :heavy_minus_sign: | N/A | | -| `random_seed` | *OptionalNullable[int]* | :heavy_minus_sign: | N/A | | -| `prediction` | [OptionalNullable[models.Prediction]](../models/prediction.md) | :heavy_minus_sign: | N/A | | -| `response_format` | [OptionalNullable[models.ResponseFormat]](../models/responseformat.md) | :heavy_minus_sign: | N/A | {
"type": "text"
} | -| `tool_choice` | [Optional[models.ToolChoiceEnum]](../models/toolchoiceenum.md) | :heavy_minus_sign: | N/A | | \ No newline at end of file +| Field | Type | Required | Description | Example | +| ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | 
----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `stop` | [OptionalNullable[models.CompletionArgsStop]](../models/completionargsstop.md) | :heavy_minus_sign: | N/A | | +| `presence_penalty` | *OptionalNullable[float]* | :heavy_minus_sign: | N/A | | +| `frequency_penalty` | *OptionalNullable[float]* | :heavy_minus_sign: | N/A | | +| `temperature` | *OptionalNullable[float]* | :heavy_minus_sign: | N/A | | +| `top_p` | *OptionalNullable[float]* | :heavy_minus_sign: | N/A | | +| `max_tokens` | *OptionalNullable[int]* | :heavy_minus_sign: | N/A | | +| `random_seed` | *OptionalNullable[int]* | :heavy_minus_sign: | N/A | | +| `prediction` | [OptionalNullable[models.Prediction]](../models/prediction.md) | :heavy_minus_sign: | N/A | | +| `response_format` | [OptionalNullable[models.ResponseFormat]](../models/responseformat.md) | :heavy_minus_sign: | N/A | **Example 1:** {
"type": "text"
}
**Example 2:** {
"type": "json_object"
}
**Example 3:** {
"type": "json_schema",
"json_schema": {
"schema": {
"properties": {
"name": {
"title": "Name",
"type": "string"
},
"authors": {
"items": {
"type": "string"
},
"title": "Authors",
"type": "array"
}
},
"required": [
"name",
"authors"
],
"title": "Book",
"type": "object",
"additionalProperties": false
},
"name": "book",
"strict": true
}
} | +| `tool_choice` | [Optional[models.ToolChoiceEnum]](../models/toolchoiceenum.md) | :heavy_minus_sign: | N/A | | \ No newline at end of file diff --git a/docs/models/completiondetailedjoboutintegration.md b/docs/models/completiondetailedjoboutintegration.md deleted file mode 100644 index 9e526053..00000000 --- a/docs/models/completiondetailedjoboutintegration.md +++ /dev/null @@ -1,11 +0,0 @@ -# CompletionDetailedJobOutIntegration - - -## Supported Types - -### `models.WandbIntegrationOut` - -```python -value: models.WandbIntegrationOut = /* values here */ -``` - diff --git a/docs/models/completiondetailedjoboutrepository.md b/docs/models/completiondetailedjoboutrepository.md deleted file mode 100644 index 92a7b75c..00000000 --- a/docs/models/completiondetailedjoboutrepository.md +++ /dev/null @@ -1,11 +0,0 @@ -# CompletionDetailedJobOutRepository - - -## Supported Types - -### `models.GithubRepositoryOut` - -```python -value: models.GithubRepositoryOut = /* values here */ -``` - diff --git a/docs/models/completionfinetunedmodel.md b/docs/models/completionfinetunedmodel.md new file mode 100644 index 00000000..0055db02 --- /dev/null +++ b/docs/models/completionfinetunedmodel.md @@ -0,0 +1,22 @@ +# CompletionFineTunedModel + + +## Fields + +| Field | Type | Required | Description | +| ---------------------------------------------------------------------------- | ---------------------------------------------------------------------------- | ---------------------------------------------------------------------------- | ---------------------------------------------------------------------------- | +| `id` | *str* | :heavy_check_mark: | N/A | +| `object` | *Optional[Literal["model"]]* | :heavy_minus_sign: | N/A | +| `created` | *int* | :heavy_check_mark: | N/A | +| `owned_by` | *str* | :heavy_check_mark: | N/A | +| `workspace_id` | *str* | :heavy_check_mark: | N/A | +| `root` | *str* | :heavy_check_mark: | N/A | +| `root_version` | *str* | :heavy_check_mark: | N/A | 
+| `archived` | *bool* | :heavy_check_mark: | N/A | +| `name` | *OptionalNullable[str]* | :heavy_minus_sign: | N/A | +| `description` | *OptionalNullable[str]* | :heavy_minus_sign: | N/A | +| `capabilities` | [models.FineTunedModelCapabilities](../models/finetunedmodelcapabilities.md) | :heavy_check_mark: | N/A | +| `max_context_length` | *Optional[int]* | :heavy_minus_sign: | N/A | +| `aliases` | List[*str*] | :heavy_minus_sign: | N/A | +| `job` | *str* | :heavy_check_mark: | N/A | +| `model_type` | *Literal["completion"]* | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/completionjobout.md b/docs/models/completionfinetuningjob.md similarity index 97% rename from docs/models/completionjobout.md rename to docs/models/completionfinetuningjob.md index 5eb44eef..83c0ae7e 100644 --- a/docs/models/completionjobout.md +++ b/docs/models/completionfinetuningjob.md @@ -1,4 +1,4 @@ -# CompletionJobOut +# CompletionFineTuningJob ## Fields @@ -8,7 +8,7 @@ | `id` | *str* | :heavy_check_mark: | The ID of the job. | | `auto_start` | *bool* | :heavy_check_mark: | N/A | | `model` | *str* | :heavy_check_mark: | N/A | -| `status` | [models.CompletionJobOutStatus](../models/completionjoboutstatus.md) | :heavy_check_mark: | The current status of the fine-tuning job. | +| `status` | [models.CompletionFineTuningJobStatus](../models/completionfinetuningjobstatus.md) | :heavy_check_mark: | The current status of the fine-tuning job. | | `created_at` | *int* | :heavy_check_mark: | The UNIX timestamp (in seconds) for when the fine-tuning job was created. | | `modified_at` | *int* | :heavy_check_mark: | The UNIX timestamp (in seconds) for when the fine-tuning job was last modified. | | `training_files` | List[*str*] | :heavy_check_mark: | A list containing the IDs of uploaded files that contain training data. | @@ -16,9 +16,9 @@ | `object` | *Optional[Literal["job"]]* | :heavy_minus_sign: | The object type of the fine-tuning job. 
| | `fine_tuned_model` | *OptionalNullable[str]* | :heavy_minus_sign: | The name of the fine-tuned model that is being created. The value will be `null` if the fine-tuning job is still running. | | `suffix` | *OptionalNullable[str]* | :heavy_minus_sign: | Optional text/code that adds more context for the model. When given a `prompt` and a `suffix` the model will fill what is between them. When `suffix` is not provided, the model will simply execute completion starting with `prompt`. | -| `integrations` | List[[models.CompletionJobOutIntegration](../models/completionjoboutintegration.md)] | :heavy_minus_sign: | A list of integrations enabled for your fine-tuning job. | +| `integrations` | List[[models.CompletionFineTuningJobIntegration](../models/completionfinetuningjobintegration.md)] | :heavy_minus_sign: | A list of integrations enabled for your fine-tuning job. | | `trained_tokens` | *OptionalNullable[int]* | :heavy_minus_sign: | Total number of tokens trained. | -| `metadata` | [OptionalNullable[models.JobMetadataOut]](../models/jobmetadataout.md) | :heavy_minus_sign: | N/A | +| `metadata` | [OptionalNullable[models.JobMetadata]](../models/jobmetadata.md) | :heavy_minus_sign: | N/A | | `job_type` | *Literal["completion"]* | :heavy_check_mark: | The type of job (`FT` for fine-tuning). 
| | `hyperparameters` | [models.CompletionTrainingParameters](../models/completiontrainingparameters.md) | :heavy_check_mark: | N/A | -| `repositories` | List[[models.CompletionJobOutRepository](../models/completionjoboutrepository.md)] | :heavy_minus_sign: | N/A | \ No newline at end of file +| `repositories` | List[[models.CompletionFineTuningJobRepository](../models/completionfinetuningjobrepository.md)] | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/completiondetailedjobout.md b/docs/models/completionfinetuningjobdetails.md similarity index 94% rename from docs/models/completiondetailedjobout.md rename to docs/models/completionfinetuningjobdetails.md index bc7e5d1c..3c54e874 100644 --- a/docs/models/completiondetailedjobout.md +++ b/docs/models/completionfinetuningjobdetails.md @@ -1,4 +1,4 @@ -# CompletionDetailedJobOut +# CompletionFineTuningJobDetails ## Fields @@ -8,7 +8,7 @@ | `id` | *str* | :heavy_check_mark: | N/A | | `auto_start` | *bool* | :heavy_check_mark: | N/A | | `model` | *str* | :heavy_check_mark: | N/A | -| `status` | [models.CompletionDetailedJobOutStatus](../models/completiondetailedjoboutstatus.md) | :heavy_check_mark: | N/A | +| `status` | [models.CompletionFineTuningJobDetailsStatus](../models/completionfinetuningjobdetailsstatus.md) | :heavy_check_mark: | N/A | | `created_at` | *int* | :heavy_check_mark: | N/A | | `modified_at` | *int* | :heavy_check_mark: | N/A | | `training_files` | List[*str*] | :heavy_check_mark: | N/A | @@ -16,11 +16,11 @@ | `object` | *Optional[Literal["job"]]* | :heavy_minus_sign: | N/A | | `fine_tuned_model` | *OptionalNullable[str]* | :heavy_minus_sign: | N/A | | `suffix` | *OptionalNullable[str]* | :heavy_minus_sign: | N/A | -| `integrations` | List[[models.CompletionDetailedJobOutIntegration](../models/completiondetailedjoboutintegration.md)] | :heavy_minus_sign: | N/A | +| `integrations` | 
List[[models.CompletionFineTuningJobDetailsIntegration](../models/completionfinetuningjobdetailsintegration.md)] | :heavy_minus_sign: | N/A | | `trained_tokens` | *OptionalNullable[int]* | :heavy_minus_sign: | N/A | -| `metadata` | [OptionalNullable[models.JobMetadataOut]](../models/jobmetadataout.md) | :heavy_minus_sign: | N/A | +| `metadata` | [OptionalNullable[models.JobMetadata]](../models/jobmetadata.md) | :heavy_minus_sign: | N/A | | `job_type` | *Literal["completion"]* | :heavy_check_mark: | N/A | | `hyperparameters` | [models.CompletionTrainingParameters](../models/completiontrainingparameters.md) | :heavy_check_mark: | N/A | -| `repositories` | List[[models.CompletionDetailedJobOutRepository](../models/completiondetailedjoboutrepository.md)] | :heavy_minus_sign: | N/A | -| `events` | List[[models.EventOut](../models/eventout.md)] | :heavy_minus_sign: | Event items are created every time the status of a fine-tuning job changes. The timestamped list of all events is accessible here. | -| `checkpoints` | List[[models.CheckpointOut](../models/checkpointout.md)] | :heavy_minus_sign: | N/A | \ No newline at end of file +| `repositories` | List[[models.CompletionFineTuningJobDetailsRepository](../models/completionfinetuningjobdetailsrepository.md)] | :heavy_minus_sign: | N/A | +| `events` | List[[models.Event](../models/event.md)] | :heavy_minus_sign: | Event items are created every time the status of a fine-tuning job changes. The timestamped list of all events is accessible here. 
| +| `checkpoints` | List[[models.Checkpoint](../models/checkpoint.md)] | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/completionfinetuningjobdetailsintegration.md b/docs/models/completionfinetuningjobdetailsintegration.md new file mode 100644 index 00000000..38f6a349 --- /dev/null +++ b/docs/models/completionfinetuningjobdetailsintegration.md @@ -0,0 +1,11 @@ +# CompletionFineTuningJobDetailsIntegration + + +## Supported Types + +### `models.WandbIntegrationResult` + +```python +value: models.WandbIntegrationResult = /* values here */ +``` + diff --git a/docs/models/completionfinetuningjobdetailsrepository.md b/docs/models/completionfinetuningjobdetailsrepository.md new file mode 100644 index 00000000..c6bd67cd --- /dev/null +++ b/docs/models/completionfinetuningjobdetailsrepository.md @@ -0,0 +1,11 @@ +# CompletionFineTuningJobDetailsRepository + + +## Supported Types + +### `models.GithubRepository` + +```python +value: models.GithubRepository = /* values here */ +``` + diff --git a/docs/models/completiondetailedjoboutstatus.md b/docs/models/completionfinetuningjobdetailsstatus.md similarity index 94% rename from docs/models/completiondetailedjoboutstatus.md rename to docs/models/completionfinetuningjobdetailsstatus.md index b80525ba..94d795a9 100644 --- a/docs/models/completiondetailedjoboutstatus.md +++ b/docs/models/completionfinetuningjobdetailsstatus.md @@ -1,4 +1,4 @@ -# CompletionDetailedJobOutStatus +# CompletionFineTuningJobDetailsStatus ## Values diff --git a/docs/models/completionfinetuningjobintegration.md b/docs/models/completionfinetuningjobintegration.md new file mode 100644 index 00000000..dbe57417 --- /dev/null +++ b/docs/models/completionfinetuningjobintegration.md @@ -0,0 +1,11 @@ +# CompletionFineTuningJobIntegration + + +## Supported Types + +### `models.WandbIntegrationResult` + +```python +value: models.WandbIntegrationResult = /* values here */ +``` + diff --git 
a/docs/models/completionfinetuningjobrepository.md b/docs/models/completionfinetuningjobrepository.md new file mode 100644 index 00000000..54225e27 --- /dev/null +++ b/docs/models/completionfinetuningjobrepository.md @@ -0,0 +1,11 @@ +# CompletionFineTuningJobRepository + + +## Supported Types + +### `models.GithubRepository` + +```python +value: models.GithubRepository = /* values here */ +``` + diff --git a/docs/models/classifierjoboutstatus.md b/docs/models/completionfinetuningjobstatus.md similarity index 95% rename from docs/models/classifierjoboutstatus.md rename to docs/models/completionfinetuningjobstatus.md index 4520f164..db151a1b 100644 --- a/docs/models/classifierjoboutstatus.md +++ b/docs/models/completionfinetuningjobstatus.md @@ -1,4 +1,4 @@ -# ClassifierJobOutStatus +# CompletionFineTuningJobStatus The current status of the fine-tuning job. diff --git a/docs/models/completionftmodelout.md b/docs/models/completionftmodelout.md deleted file mode 100644 index ccd4844f..00000000 --- a/docs/models/completionftmodelout.md +++ /dev/null @@ -1,22 +0,0 @@ -# CompletionFTModelOut - - -## Fields - -| Field | Type | Required | Description | -| -------------------------------------------------------------------- | -------------------------------------------------------------------- | -------------------------------------------------------------------- | -------------------------------------------------------------------- | -| `id` | *str* | :heavy_check_mark: | N/A | -| `object` | *Optional[Literal["model"]]* | :heavy_minus_sign: | N/A | -| `created` | *int* | :heavy_check_mark: | N/A | -| `owned_by` | *str* | :heavy_check_mark: | N/A | -| `workspace_id` | *str* | :heavy_check_mark: | N/A | -| `root` | *str* | :heavy_check_mark: | N/A | -| `root_version` | *str* | :heavy_check_mark: | N/A | -| `archived` | *bool* | :heavy_check_mark: | N/A | -| `name` | *OptionalNullable[str]* | :heavy_minus_sign: | N/A | -| `description` | *OptionalNullable[str]* | 
:heavy_minus_sign: | N/A | -| `capabilities` | [models.FTModelCapabilitiesOut](../models/ftmodelcapabilitiesout.md) | :heavy_check_mark: | N/A | -| `max_context_length` | *Optional[int]* | :heavy_minus_sign: | N/A | -| `aliases` | List[*str*] | :heavy_minus_sign: | N/A | -| `job` | *str* | :heavy_check_mark: | N/A | -| `model_type` | *Literal["completion"]* | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/completionjoboutintegration.md b/docs/models/completionjoboutintegration.md deleted file mode 100644 index 6474747b..00000000 --- a/docs/models/completionjoboutintegration.md +++ /dev/null @@ -1,11 +0,0 @@ -# CompletionJobOutIntegration - - -## Supported Types - -### `models.WandbIntegrationOut` - -```python -value: models.WandbIntegrationOut = /* values here */ -``` - diff --git a/docs/models/completionjoboutrepository.md b/docs/models/completionjoboutrepository.md deleted file mode 100644 index 52f65558..00000000 --- a/docs/models/completionjoboutrepository.md +++ /dev/null @@ -1,11 +0,0 @@ -# CompletionJobOutRepository - - -## Supported Types - -### `models.GithubRepositoryOut` - -```python -value: models.GithubRepositoryOut = /* values here */ -``` - diff --git a/docs/models/completiontrainingparametersin.md b/docs/models/completiontrainingparametersin.md deleted file mode 100644 index 9fcc714e..00000000 --- a/docs/models/completiontrainingparametersin.md +++ /dev/null @@ -1,16 +0,0 @@ -# CompletionTrainingParametersIn - -The fine-tuning hyperparameter settings used in a fine-tune job. 
- - -## Fields - -| Field | Type | Required | Description | -| -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| `training_steps` | *OptionalNullable[int]* | :heavy_minus_sign: | The number of training steps to perform. A training step refers to a single update of the model weights during the fine-tuning process. This update is typically calculated using a batch of samples from the training dataset. 
| -| `learning_rate` | *Optional[float]* | :heavy_minus_sign: | A parameter describing how much to adjust the pre-trained model's weights in response to the estimated error each time the weights are updated during the fine-tuning process. | -| `weight_decay` | *OptionalNullable[float]* | :heavy_minus_sign: | (Advanced Usage) Weight decay adds a term to the loss function that is proportional to the sum of the squared weights. This term reduces the magnitude of the weights and prevents them from growing too large. | -| `warmup_fraction` | *OptionalNullable[float]* | :heavy_minus_sign: | (Advanced Usage) A parameter that specifies the percentage of the total training steps at which the learning rate warm-up phase ends. During this phase, the learning rate gradually increases from a small value to the initial learning rate, helping to stabilize the training process and improve convergence. Similar to `pct_start` in [mistral-finetune](https://github.com/mistralai/mistral-finetune) | -| `epochs` | *OptionalNullable[float]* | :heavy_minus_sign: | N/A | -| `seq_len` | *OptionalNullable[int]* | :heavy_minus_sign: | N/A | -| `fim_ratio` | *OptionalNullable[float]* | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/confirmation.md b/docs/models/confirmation.md new file mode 100644 index 00000000..fd6e6aaa --- /dev/null +++ b/docs/models/confirmation.md @@ -0,0 +1,9 @@ +# Confirmation + + +## Values + +| Name | Value | +| ------- | ------- | +| `ALLOW` | allow | +| `DENY` | deny | \ No newline at end of file diff --git a/docs/models/conversationappendrequest.md b/docs/models/conversationappendrequest.md index 1cdb584b..78a96508 100644 --- a/docs/models/conversationappendrequest.md +++ b/docs/models/conversationappendrequest.md @@ -5,8 +5,9 @@ | Field | Type | Required | Description | | -------------------------------------------------------------------------------------------------------------------- | 
-------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------- | -| `inputs` | [models.ConversationInputs](../models/conversationinputs.md) | :heavy_check_mark: | N/A | +| `inputs` | [Optional[models.ConversationInputs]](../models/conversationinputs.md) | :heavy_minus_sign: | N/A | | `stream` | *Optional[bool]* | :heavy_minus_sign: | N/A | | `store` | *Optional[bool]* | :heavy_minus_sign: | Whether to store the results into our servers or not. | | `handoff_execution` | [Optional[models.ConversationAppendRequestHandoffExecution]](../models/conversationappendrequesthandoffexecution.md) | :heavy_minus_sign: | N/A | -| `completion_args` | [Optional[models.CompletionArgs]](../models/completionargs.md) | :heavy_minus_sign: | White-listed arguments from the completion API | \ No newline at end of file +| `completion_args` | [Optional[models.CompletionArgs]](../models/completionargs.md) | :heavy_minus_sign: | White-listed arguments from the completion API | +| `tool_confirmations` | List[[models.ToolCallConfirmation](../models/toolcallconfirmation.md)] | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/conversationappendstreamrequest.md b/docs/models/conversationappendstreamrequest.md index a8516ea7..daea9c52 100644 --- a/docs/models/conversationappendstreamrequest.md +++ b/docs/models/conversationappendstreamrequest.md @@ -5,8 +5,9 @@ | Field | Type | Required | Description | | -------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------- | 
-------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------- | -| `inputs` | [models.ConversationInputs](../models/conversationinputs.md) | :heavy_check_mark: | N/A | +| `inputs` | [Optional[models.ConversationInputs]](../models/conversationinputs.md) | :heavy_minus_sign: | N/A | | `stream` | *Optional[bool]* | :heavy_minus_sign: | N/A | | `store` | *Optional[bool]* | :heavy_minus_sign: | Whether to store the results into our servers or not. | | `handoff_execution` | [Optional[models.ConversationAppendStreamRequestHandoffExecution]](../models/conversationappendstreamrequesthandoffexecution.md) | :heavy_minus_sign: | N/A | -| `completion_args` | [Optional[models.CompletionArgs]](../models/completionargs.md) | :heavy_minus_sign: | White-listed arguments from the completion API | \ No newline at end of file +| `completion_args` | [Optional[models.CompletionArgs]](../models/completionargs.md) | :heavy_minus_sign: | White-listed arguments from the completion API | +| `tool_confirmations` | List[[models.ToolCallConfirmation](../models/toolcallconfirmation.md)] | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/conversationhistory.md b/docs/models/conversationhistory.md index c8baad0b..daefe336 100644 --- a/docs/models/conversationhistory.md +++ b/docs/models/conversationhistory.md @@ -5,8 +5,8 @@ Retrieve all entries in a conversation. 
## Fields -| Field | Type | Required | Description | -| ------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------ | -| `object` | [Optional[models.ConversationHistoryObject]](../models/conversationhistoryobject.md) | :heavy_minus_sign: | N/A | -| `conversation_id` | *str* | :heavy_check_mark: | N/A | -| `entries` | List[[models.Entry](../models/entry.md)] | :heavy_check_mark: | N/A | \ No newline at end of file +| Field | Type | Required | Description | +| ------------------------------------------- | ------------------------------------------- | ------------------------------------------- | ------------------------------------------- | +| `object` | *Optional[Literal["conversation.history"]]* | :heavy_minus_sign: | N/A | +| `conversation_id` | *str* | :heavy_check_mark: | N/A | +| `entries` | List[[models.Entry](../models/entry.md)] | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/conversationhistoryobject.md b/docs/models/conversationhistoryobject.md deleted file mode 100644 index a14e7f9c..00000000 --- a/docs/models/conversationhistoryobject.md +++ /dev/null @@ -1,8 +0,0 @@ -# ConversationHistoryObject - - -## Values - -| Name | Value | -| ---------------------- | ---------------------- | -| `CONVERSATION_HISTORY` | conversation.history | \ No newline at end of file diff --git a/docs/models/conversationmessages.md b/docs/models/conversationmessages.md index c3f00979..8fa51571 100644 --- a/docs/models/conversationmessages.md +++ b/docs/models/conversationmessages.md @@ -5,8 +5,8 @@ Similar to the conversation history but only keep the messages ## Fields -| Field | Type | Required | Description | -| 
-------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------- | -| `object` | [Optional[models.ConversationMessagesObject]](../models/conversationmessagesobject.md) | :heavy_minus_sign: | N/A | -| `conversation_id` | *str* | :heavy_check_mark: | N/A | -| `messages` | List[[models.MessageEntries](../models/messageentries.md)] | :heavy_check_mark: | N/A | \ No newline at end of file +| Field | Type | Required | Description | +| ---------------------------------------------------------- | ---------------------------------------------------------- | ---------------------------------------------------------- | ---------------------------------------------------------- | +| `object` | *Optional[Literal["conversation.messages"]]* | :heavy_minus_sign: | N/A | +| `conversation_id` | *str* | :heavy_check_mark: | N/A | +| `messages` | List[[models.MessageEntries](../models/messageentries.md)] | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/conversationmessagesobject.md b/docs/models/conversationmessagesobject.md deleted file mode 100644 index db3a441b..00000000 --- a/docs/models/conversationmessagesobject.md +++ /dev/null @@ -1,8 +0,0 @@ -# ConversationMessagesObject - - -## Values - -| Name | Value | -| ----------------------- | ----------------------- | -| `CONVERSATION_MESSAGES` | conversation.messages | \ No newline at end of file diff --git a/docs/models/conversationresponse.md b/docs/models/conversationresponse.md index e3182128..2732f785 100644 --- a/docs/models/conversationresponse.md +++ b/docs/models/conversationresponse.md @@ -5,9 +5,9 @@ The response after appending new entries to the conversation. 
## Fields -| Field | Type | Required | Description | -| -------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------- | -| `object` | [Optional[models.ConversationResponseObject]](../models/conversationresponseobject.md) | :heavy_minus_sign: | N/A | -| `conversation_id` | *str* | :heavy_check_mark: | N/A | -| `outputs` | List[[models.Output](../models/output.md)] | :heavy_check_mark: | N/A | -| `usage` | [models.ConversationUsageInfo](../models/conversationusageinfo.md) | :heavy_check_mark: | N/A | \ No newline at end of file +| Field | Type | Required | Description | +| ------------------------------------------------------------------ | ------------------------------------------------------------------ | ------------------------------------------------------------------ | ------------------------------------------------------------------ | +| `object` | *Optional[Literal["conversation.response"]]* | :heavy_minus_sign: | N/A | +| `conversation_id` | *str* | :heavy_check_mark: | N/A | +| `outputs` | List[[models.Output](../models/output.md)] | :heavy_check_mark: | N/A | +| `usage` | [models.ConversationUsageInfo](../models/conversationusageinfo.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/conversationresponseobject.md b/docs/models/conversationresponseobject.md deleted file mode 100644 index bea66e52..00000000 --- a/docs/models/conversationresponseobject.md +++ /dev/null @@ -1,8 +0,0 @@ -# ConversationResponseObject - - -## Values - -| Name | Value | -| ----------------------- | ----------------------- | -| `CONVERSATION_RESPONSE` | conversation.response | \ No newline at end of file diff --git a/docs/models/conversationrestartrequest.md 
b/docs/models/conversationrestartrequest.md index d9865312..ad3ff362 100644 --- a/docs/models/conversationrestartrequest.md +++ b/docs/models/conversationrestartrequest.md @@ -7,7 +7,7 @@ Request to restart a new conversation from a given entry in the conversation. | Field | Type | Required | Description | | ---------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------- | -| `inputs` | [models.ConversationInputs](../models/conversationinputs.md) | :heavy_check_mark: | N/A | +| `inputs` | [Optional[models.ConversationInputs]](../models/conversationinputs.md) | :heavy_minus_sign: | N/A | | `stream` | *Optional[bool]* | :heavy_minus_sign: | N/A | | `store` | *Optional[bool]* | :heavy_minus_sign: | Whether to store the results into our servers or not. | | `handoff_execution` | [Optional[models.ConversationRestartRequestHandoffExecution]](../models/conversationrestartrequesthandoffexecution.md) | :heavy_minus_sign: | N/A | diff --git a/docs/models/conversationrestartstreamrequest.md b/docs/models/conversationrestartstreamrequest.md index a5f8cbe7..865a1e8f 100644 --- a/docs/models/conversationrestartstreamrequest.md +++ b/docs/models/conversationrestartstreamrequest.md @@ -7,7 +7,7 @@ Request to restart a new conversation from a given entry in the conversation. 
| Field | Type | Required | Description | | ---------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------- | -| `inputs` | [models.ConversationInputs](../models/conversationinputs.md) | :heavy_check_mark: | N/A | +| `inputs` | [Optional[models.ConversationInputs]](../models/conversationinputs.md) | :heavy_minus_sign: | N/A | | `stream` | *Optional[bool]* | :heavy_minus_sign: | N/A | | `store` | *Optional[bool]* | :heavy_minus_sign: | Whether to store the results into our servers or not. | | `handoff_execution` | [Optional[models.ConversationRestartStreamRequestHandoffExecution]](../models/conversationrestartstreamrequesthandoffexecution.md) | :heavy_minus_sign: | N/A | diff --git a/docs/models/conversationthinkchunk.md b/docs/models/conversationthinkchunk.md new file mode 100644 index 00000000..1fb16bd9 --- /dev/null +++ b/docs/models/conversationthinkchunk.md @@ -0,0 +1,10 @@ +# ConversationThinkChunk + + +## Fields + +| Field | Type | Required | Description | +| ------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------ | +| `type` | *Optional[Literal["thinking"]]* | :heavy_minus_sign: | N/A | +| `thinking` | List[[models.ConversationThinkChunkThinking](../models/conversationthinkchunkthinking.md)] | 
:heavy_check_mark: | N/A | +| `closed` | *Optional[bool]* | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/conversationthinkchunkthinking.md b/docs/models/conversationthinkchunkthinking.md new file mode 100644 index 00000000..84b80018 --- /dev/null +++ b/docs/models/conversationthinkchunkthinking.md @@ -0,0 +1,17 @@ +# ConversationThinkChunkThinking + + +## Supported Types + +### `models.TextChunk` + +```python +value: models.TextChunk = /* values here */ +``` + +### `models.ToolReferenceChunk` + +```python +value: models.ToolReferenceChunk = /* values here */ +``` + diff --git a/docs/models/agentupdaterequest.md b/docs/models/createagentrequest.md similarity index 80% rename from docs/models/agentupdaterequest.md rename to docs/models/createagentrequest.md index b1830d7b..cca3a079 100644 --- a/docs/models/agentupdaterequest.md +++ b/docs/models/createagentrequest.md @@ -1,4 +1,4 @@ -# AgentUpdateRequest +# CreateAgentRequest ## Fields @@ -6,12 +6,11 @@ | Field | Type | Required | Description | | -------------------------------------------------------------------------- | -------------------------------------------------------------------------- | -------------------------------------------------------------------------- | -------------------------------------------------------------------------- | | `instructions` | *OptionalNullable[str]* | :heavy_minus_sign: | Instruction prompt the model will follow during the conversation. | -| `tools` | List[[models.AgentUpdateRequestTool](../models/agentupdaterequesttool.md)] | :heavy_minus_sign: | List of tools which are available to the model during the conversation. | +| `tools` | List[[models.CreateAgentRequestTool](../models/createagentrequesttool.md)] | :heavy_minus_sign: | List of tools which are available to the model during the conversation. 
| | `completion_args` | [Optional[models.CompletionArgs]](../models/completionargs.md) | :heavy_minus_sign: | White-listed arguments from the completion API | -| `model` | *OptionalNullable[str]* | :heavy_minus_sign: | N/A | -| `name` | *OptionalNullable[str]* | :heavy_minus_sign: | N/A | +| `model` | *str* | :heavy_check_mark: | N/A | +| `name` | *str* | :heavy_check_mark: | N/A | | `description` | *OptionalNullable[str]* | :heavy_minus_sign: | N/A | | `handoffs` | List[*str*] | :heavy_minus_sign: | N/A | -| `deployment_chat` | *OptionalNullable[bool]* | :heavy_minus_sign: | N/A | | `metadata` | Dict[str, *Any*] | :heavy_minus_sign: | N/A | | `version_message` | *OptionalNullable[str]* | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/agentupdaterequesttool.md b/docs/models/createagentrequesttool.md similarity index 96% rename from docs/models/agentupdaterequesttool.md rename to docs/models/createagentrequesttool.md index ce553126..c6ed3e98 100644 --- a/docs/models/agentupdaterequesttool.md +++ b/docs/models/createagentrequesttool.md @@ -1,4 +1,4 @@ -# AgentUpdateRequestTool +# CreateAgentRequestTool ## Supported Types diff --git a/docs/models/batchjobin.md b/docs/models/createbatchjobrequest.md similarity index 99% rename from docs/models/batchjobin.md rename to docs/models/createbatchjobrequest.md index 7dcf265d..d094e2d5 100644 --- a/docs/models/batchjobin.md +++ b/docs/models/createbatchjobrequest.md @@ -1,4 +1,4 @@ -# BatchJobIn +# CreateBatchJobRequest ## Fields @@ -8,7 +8,7 @@ | `input_files` | List[*str*] | :heavy_minus_sign: | The list of input files to be used for batch inference, these files should be `jsonl` files, containing the input data corresponding to the body request for the batch inference in a "body" field.
An example of such file is the following: ```json {"custom_id": "0", "body": {"max_tokens": 100, "messages": [{"role": "user", "content": "What is the best French cheese?"}]}} {"custom_id": "1", "body": {"max_tokens": 100, "messages": [{"role": "user", "content": "What is the best French wine?"}]}} ``` | | | `requests` | List[[models.BatchRequest](../models/batchrequest.md)] | :heavy_minus_sign: | N/A | | | `endpoint` | [models.APIEndpoint](../models/apiendpoint.md) | :heavy_check_mark: | N/A | | -| `model` | *OptionalNullable[str]* | :heavy_minus_sign: | The model to be used for batch inference. | mistral-small-latest | +| `model` | *OptionalNullable[str]* | :heavy_minus_sign: | The model to be used for batch inference. | **Example 1:** mistral-small-latest
**Example 2:** mistral-medium-latest | | `agent_id` | *OptionalNullable[str]* | :heavy_minus_sign: | In case you want to use a specific agent from the **deprecated** agents api for batch inference, you can specify the agent ID here. | | | `metadata` | Dict[str, *str*] | :heavy_minus_sign: | The metadata of your choice to be associated with the batch inference job. | | | `timeout_hours` | *Optional[int]* | :heavy_minus_sign: | The timeout in hours for the batch inference job. | | \ No newline at end of file diff --git a/docs/models/uploadfileout.md b/docs/models/createfileresponse.md similarity index 99% rename from docs/models/uploadfileout.md rename to docs/models/createfileresponse.md index 6f09c9a6..8152922b 100644 --- a/docs/models/uploadfileout.md +++ b/docs/models/createfileresponse.md @@ -1,4 +1,4 @@ -# UploadFileOut +# CreateFileResponse ## Fields diff --git a/docs/models/jobin.md b/docs/models/createfinetuningjobrequest.md similarity index 97% rename from docs/models/jobin.md rename to docs/models/createfinetuningjobrequest.md index 62da9072..a93e323d 100644 --- a/docs/models/jobin.md +++ b/docs/models/createfinetuningjobrequest.md @@ -1,4 +1,4 @@ -# JobIn +# CreateFineTuningJobRequest ## Fields @@ -9,10 +9,10 @@ | `training_files` | List[[models.TrainingFile](../models/trainingfile.md)] | :heavy_minus_sign: | N/A | | `validation_files` | List[*str*] | :heavy_minus_sign: | A list containing the IDs of uploaded files that contain validation data. If you provide these files, the data is used to generate validation metrics periodically during fine-tuning. These metrics can be viewed in `checkpoints` when getting the status of a running fine-tuning job. The same data should not be present in both train and validation files. | | `suffix` | *OptionalNullable[str]* | :heavy_minus_sign: | A string that will be added to your fine-tuning model name. 
For example, a suffix of "my-great-model" would produce a model name like `ft:open-mistral-7b:my-great-model:xxx...` | -| `integrations` | List[[models.JobInIntegration](../models/jobinintegration.md)] | :heavy_minus_sign: | A list of integrations to enable for your fine-tuning job. | +| `integrations` | List[[models.CreateFineTuningJobRequestIntegration](../models/createfinetuningjobrequestintegration.md)] | :heavy_minus_sign: | A list of integrations to enable for your fine-tuning job. | | `auto_start` | *Optional[bool]* | :heavy_minus_sign: | This field will be required in a future release. | | `invalid_sample_skip_percentage` | *Optional[float]* | :heavy_minus_sign: | N/A | | `job_type` | [OptionalNullable[models.FineTuneableModelType]](../models/finetuneablemodeltype.md) | :heavy_minus_sign: | N/A | | `hyperparameters` | [models.Hyperparameters](../models/hyperparameters.md) | :heavy_check_mark: | N/A | -| `repositories` | List[[models.JobInRepository](../models/jobinrepository.md)] | :heavy_minus_sign: | N/A | -| `classifier_targets` | List[[models.ClassifierTargetIn](../models/classifiertargetin.md)] | :heavy_minus_sign: | N/A | \ No newline at end of file +| `repositories` | List[[models.CreateFineTuningJobRequestRepository](../models/createfinetuningjobrequestrepository.md)] | :heavy_minus_sign: | N/A | +| `classifier_targets` | List[[models.ClassifierTarget](../models/classifiertarget.md)] | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/jobinintegration.md b/docs/models/createfinetuningjobrequestintegration.md similarity index 74% rename from docs/models/jobinintegration.md rename to docs/models/createfinetuningjobrequestintegration.md index 103820e7..0054a4a6 100644 --- a/docs/models/jobinintegration.md +++ b/docs/models/createfinetuningjobrequestintegration.md @@ -1,4 +1,4 @@ -# JobInIntegration +# CreateFineTuningJobRequestIntegration ## Supported Types diff --git a/docs/models/jobinrepository.md 
b/docs/models/createfinetuningjobrequestrepository.md similarity index 75% rename from docs/models/jobinrepository.md rename to docs/models/createfinetuningjobrequestrepository.md index e873ae63..32be1b6d 100644 --- a/docs/models/jobinrepository.md +++ b/docs/models/createfinetuningjobrequestrepository.md @@ -1,4 +1,4 @@ -# JobInRepository +# CreateFineTuningJobRequestRepository ## Supported Types diff --git a/docs/models/createfinetuningjobresponse.md b/docs/models/createfinetuningjobresponse.md deleted file mode 100644 index f82cd793..00000000 --- a/docs/models/createfinetuningjobresponse.md +++ /dev/null @@ -1,19 +0,0 @@ -# CreateFineTuningJobResponse - -OK - - -## Supported Types - -### `models.Response` - -```python -value: models.Response = /* values here */ -``` - -### `models.LegacyJobMetadataOut` - -```python -value: models.LegacyJobMetadataOut = /* values here */ -``` - diff --git a/docs/models/libraryin.md b/docs/models/createlibraryrequest.md similarity index 95% rename from docs/models/libraryin.md rename to docs/models/createlibraryrequest.md index d6b11914..71562806 100644 --- a/docs/models/libraryin.md +++ b/docs/models/createlibraryrequest.md @@ -1,4 +1,4 @@ -# LibraryIn +# CreateLibraryRequest ## Fields diff --git a/docs/models/deletefileout.md b/docs/models/deletefileresponse.md similarity index 97% rename from docs/models/deletefileout.md rename to docs/models/deletefileresponse.md index 4709cc49..188e2504 100644 --- a/docs/models/deletefileout.md +++ b/docs/models/deletefileresponse.md @@ -1,4 +1,4 @@ -# DeleteFileOut +# DeleteFileResponse ## Fields diff --git a/docs/models/deletemodelrequest.md b/docs/models/deletemodelv1modelsmodeliddeleterequest.md similarity index 94% rename from docs/models/deletemodelrequest.md rename to docs/models/deletemodelv1modelsmodeliddeleterequest.md index d80103f1..d9bc15fe 100644 --- a/docs/models/deletemodelrequest.md +++ b/docs/models/deletemodelv1modelsmodeliddeleterequest.md @@ -1,4 +1,4 @@ -# 
DeleteModelRequest +# DeleteModelV1ModelsModelIDDeleteRequest ## Fields diff --git a/docs/models/document.md b/docs/models/document.md index 509d43b7..284babb9 100644 --- a/docs/models/document.md +++ b/docs/models/document.md @@ -1,25 +1,26 @@ # Document -Document to run OCR on - - -## Supported Types - -### `models.FileChunk` - -```python -value: models.FileChunk = /* values here */ -``` - -### `models.DocumentURLChunk` - -```python -value: models.DocumentURLChunk = /* values here */ -``` - -### `models.ImageURLChunk` - -```python -value: models.ImageURLChunk = /* values here */ -``` +## Fields + +| Field | Type | Required | Description | +| -------------------------------------------------------------------- | -------------------------------------------------------------------- | -------------------------------------------------------------------- | -------------------------------------------------------------------- | +| `id` | *str* | :heavy_check_mark: | N/A | +| `library_id` | *str* | :heavy_check_mark: | N/A | +| `hash` | *Nullable[str]* | :heavy_check_mark: | N/A | +| `mime_type` | *Nullable[str]* | :heavy_check_mark: | N/A | +| `extension` | *Nullable[str]* | :heavy_check_mark: | N/A | +| `size` | *Nullable[int]* | :heavy_check_mark: | N/A | +| `name` | *str* | :heavy_check_mark: | N/A | +| `summary` | *OptionalNullable[str]* | :heavy_minus_sign: | N/A | +| `created_at` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_check_mark: | N/A | +| `last_processed_at` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_minus_sign: | N/A | +| `number_of_pages` | *OptionalNullable[int]* | :heavy_minus_sign: | N/A | +| `uploaded_by_id` | *Nullable[str]* | :heavy_check_mark: | N/A | +| `uploaded_by_type` | *str* | :heavy_check_mark: | N/A | +| `tokens_processing_main_content` | *OptionalNullable[int]* | :heavy_minus_sign: | N/A | +| `tokens_processing_summary` | *OptionalNullable[int]* | :heavy_minus_sign: | 
N/A | +| `url` | *OptionalNullable[str]* | :heavy_minus_sign: | N/A | +| `attributes` | Dict[str, *Any*] | :heavy_minus_sign: | N/A | +| `processing_status` | *str* | :heavy_check_mark: | N/A | +| `tokens_processing_total` | *int* | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/documentlibrarytool.md b/docs/models/documentlibrarytool.md index 1695bad4..95c3fa52 100644 --- a/docs/models/documentlibrarytool.md +++ b/docs/models/documentlibrarytool.md @@ -3,7 +3,8 @@ ## Fields -| Field | Type | Required | Description | -| -------------------------------------- | -------------------------------------- | -------------------------------------- | -------------------------------------- | -| `type` | *Literal["document_library"]* | :heavy_check_mark: | N/A | -| `library_ids` | List[*str*] | :heavy_check_mark: | Ids of the library in which to search. | \ No newline at end of file +| Field | Type | Required | Description | +| ---------------------------------------------------------------------------- | ---------------------------------------------------------------------------- | ---------------------------------------------------------------------------- | ---------------------------------------------------------------------------- | +| `tool_configuration` | [OptionalNullable[models.ToolConfiguration]](../models/toolconfiguration.md) | :heavy_minus_sign: | N/A | +| `type` | *Literal["document_library"]* | :heavy_check_mark: | N/A | +| `library_ids` | List[*str*] | :heavy_check_mark: | Ids of the library in which to search. 
| \ No newline at end of file diff --git a/docs/models/documentout.md b/docs/models/documentout.md deleted file mode 100644 index 28df11eb..00000000 --- a/docs/models/documentout.md +++ /dev/null @@ -1,26 +0,0 @@ -# DocumentOut - - -## Fields - -| Field | Type | Required | Description | -| -------------------------------------------------------------------- | -------------------------------------------------------------------- | -------------------------------------------------------------------- | -------------------------------------------------------------------- | -| `id` | *str* | :heavy_check_mark: | N/A | -| `library_id` | *str* | :heavy_check_mark: | N/A | -| `hash` | *Nullable[str]* | :heavy_check_mark: | N/A | -| `mime_type` | *Nullable[str]* | :heavy_check_mark: | N/A | -| `extension` | *Nullable[str]* | :heavy_check_mark: | N/A | -| `size` | *Nullable[int]* | :heavy_check_mark: | N/A | -| `name` | *str* | :heavy_check_mark: | N/A | -| `summary` | *OptionalNullable[str]* | :heavy_minus_sign: | N/A | -| `created_at` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_check_mark: | N/A | -| `last_processed_at` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_minus_sign: | N/A | -| `number_of_pages` | *OptionalNullable[int]* | :heavy_minus_sign: | N/A | -| `processing_status` | *str* | :heavy_check_mark: | N/A | -| `uploaded_by_id` | *Nullable[str]* | :heavy_check_mark: | N/A | -| `uploaded_by_type` | *str* | :heavy_check_mark: | N/A | -| `tokens_processing_main_content` | *OptionalNullable[int]* | :heavy_minus_sign: | N/A | -| `tokens_processing_summary` | *OptionalNullable[int]* | :heavy_minus_sign: | N/A | -| `url` | *OptionalNullable[str]* | :heavy_minus_sign: | N/A | -| `attributes` | Dict[str, *Any*] | :heavy_minus_sign: | N/A | -| `tokens_processing_total` | *int* | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/documentunion.md b/docs/models/documentunion.md 
new file mode 100644 index 00000000..e573bd46 --- /dev/null +++ b/docs/models/documentunion.md @@ -0,0 +1,25 @@ +# DocumentUnion + +Document to run OCR on + + +## Supported Types + +### `models.FileChunk` + +```python +value: models.FileChunk = /* values here */ +``` + +### `models.DocumentURLChunk` + +```python +value: models.DocumentURLChunk = /* values here */ +``` + +### `models.ImageURLChunk` + +```python +value: models.ImageURLChunk = /* values here */ +``` + diff --git a/docs/models/documentupdatein.md b/docs/models/documentupdatein.md deleted file mode 100644 index 0993886d..00000000 --- a/docs/models/documentupdatein.md +++ /dev/null @@ -1,9 +0,0 @@ -# DocumentUpdateIn - - -## Fields - -| Field | Type | Required | Description | -| ------------------------------------------------------- | ------------------------------------------------------- | ------------------------------------------------------- | ------------------------------------------------------- | -| `name` | *OptionalNullable[str]* | :heavy_minus_sign: | N/A | -| `attributes` | Dict[str, [models.Attributes](../models/attributes.md)] | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/documenturlchunk.md b/docs/models/documenturlchunk.md index 6c9a5b4d..9dbfbe50 100644 --- a/docs/models/documenturlchunk.md +++ b/docs/models/documenturlchunk.md @@ -3,8 +3,8 @@ ## Fields -| Field | Type | Required | Description | -| -------------------------------------------------------------------------- | -------------------------------------------------------------------------- | -------------------------------------------------------------------------- | -------------------------------------------------------------------------- | -| `document_url` | *str* | :heavy_check_mark: | N/A | -| `document_name` | *OptionalNullable[str]* | :heavy_minus_sign: | The filename of the document | -| `type` | [Optional[models.DocumentURLChunkType]](../models/documenturlchunktype.md) | 
:heavy_minus_sign: | N/A | \ No newline at end of file +| Field | Type | Required | Description | +| ----------------------------------- | ----------------------------------- | ----------------------------------- | ----------------------------------- | +| `type` | *Optional[Literal["document_url"]]* | :heavy_minus_sign: | N/A | +| `document_url` | *str* | :heavy_check_mark: | N/A | +| `document_name` | *OptionalNullable[str]* | :heavy_minus_sign: | The filename of the document | \ No newline at end of file diff --git a/docs/models/documenturlchunktype.md b/docs/models/documenturlchunktype.md deleted file mode 100644 index 32e1fa9e..00000000 --- a/docs/models/documenturlchunktype.md +++ /dev/null @@ -1,8 +0,0 @@ -# DocumentURLChunkType - - -## Values - -| Name | Value | -| -------------- | -------------- | -| `DOCUMENT_URL` | document_url | \ No newline at end of file diff --git a/docs/models/eventout.md b/docs/models/event.md similarity index 98% rename from docs/models/eventout.md rename to docs/models/event.md index d9202353..3eebffca 100644 --- a/docs/models/eventout.md +++ b/docs/models/event.md @@ -1,4 +1,4 @@ -# EventOut +# Event ## Fields diff --git a/docs/models/downloadfilerequest.md b/docs/models/filesapiroutesdeletefilerequest.md similarity index 88% rename from docs/models/downloadfilerequest.md rename to docs/models/filesapiroutesdeletefilerequest.md index 3f4dc6cc..1b02c2db 100644 --- a/docs/models/downloadfilerequest.md +++ b/docs/models/filesapiroutesdeletefilerequest.md @@ -1,4 +1,4 @@ -# DownloadFileRequest +# FilesAPIRoutesDeleteFileRequest ## Fields diff --git a/docs/models/retrievefilerequest.md b/docs/models/filesapiroutesdownloadfilerequest.md similarity index 88% rename from docs/models/retrievefilerequest.md rename to docs/models/filesapiroutesdownloadfilerequest.md index 454b9665..8b28cb0e 100644 --- a/docs/models/retrievefilerequest.md +++ b/docs/models/filesapiroutesdownloadfilerequest.md @@ -1,4 +1,4 @@ -# RetrieveFileRequest +# 
FilesAPIRoutesDownloadFileRequest ## Fields diff --git a/docs/models/getfilesignedurlrequest.md b/docs/models/filesapiroutesgetsignedurlrequest.md similarity index 96% rename from docs/models/getfilesignedurlrequest.md rename to docs/models/filesapiroutesgetsignedurlrequest.md index 0be3b288..dbe3c801 100644 --- a/docs/models/getfilesignedurlrequest.md +++ b/docs/models/filesapiroutesgetsignedurlrequest.md @@ -1,4 +1,4 @@ -# GetFileSignedURLRequest +# FilesAPIRoutesGetSignedURLRequest ## Fields diff --git a/docs/models/listfilesrequest.md b/docs/models/filesapirouteslistfilesrequest.md similarity index 98% rename from docs/models/listfilesrequest.md rename to docs/models/filesapirouteslistfilesrequest.md index 2d76a76b..57d11722 100644 --- a/docs/models/listfilesrequest.md +++ b/docs/models/filesapirouteslistfilesrequest.md @@ -1,4 +1,4 @@ -# ListFilesRequest +# FilesAPIRoutesListFilesRequest ## Fields diff --git a/docs/models/deletefilerequest.md b/docs/models/filesapiroutesretrievefilerequest.md similarity index 88% rename from docs/models/deletefilerequest.md rename to docs/models/filesapiroutesretrievefilerequest.md index bceae901..961bae1f 100644 --- a/docs/models/deletefilerequest.md +++ b/docs/models/filesapiroutesretrievefilerequest.md @@ -1,4 +1,4 @@ -# DeleteFileRequest +# FilesAPIRoutesRetrieveFileRequest ## Fields diff --git a/docs/models/ftmodelcapabilitiesout.md b/docs/models/finetunedmodelcapabilities.md similarity index 95% rename from docs/models/ftmodelcapabilitiesout.md rename to docs/models/finetunedmodelcapabilities.md index 19690476..d3203a2a 100644 --- a/docs/models/ftmodelcapabilitiesout.md +++ b/docs/models/finetunedmodelcapabilities.md @@ -1,4 +1,4 @@ -# FTModelCapabilitiesOut +# FineTunedModelCapabilities ## Fields diff --git a/docs/models/functioncallentry.md b/docs/models/functioncallentry.md index fd3aa5c5..2843db9d 100644 --- a/docs/models/functioncallentry.md +++ b/docs/models/functioncallentry.md @@ -3,13 +3,16 @@ ## Fields -| Field 
| Type | Required | Description | -| -------------------------------------------------------------------------------- | -------------------------------------------------------------------------------- | -------------------------------------------------------------------------------- | -------------------------------------------------------------------------------- | -| `object` | [Optional[models.FunctionCallEntryObject]](../models/functioncallentryobject.md) | :heavy_minus_sign: | N/A | -| `type` | [Optional[models.FunctionCallEntryType]](../models/functioncallentrytype.md) | :heavy_minus_sign: | N/A | -| `created_at` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_minus_sign: | N/A | -| `completed_at` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_minus_sign: | N/A | -| `id` | *Optional[str]* | :heavy_minus_sign: | N/A | -| `tool_call_id` | *str* | :heavy_check_mark: | N/A | -| `name` | *str* | :heavy_check_mark: | N/A | -| `arguments` | [models.FunctionCallEntryArguments](../models/functioncallentryarguments.md) | :heavy_check_mark: | N/A | \ No newline at end of file +| Field | Type | Required | Description | +| ---------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------- | +| `object` | *Optional[Literal["entry"]]* | :heavy_minus_sign: | N/A | +| `type` | *Optional[Literal["function.call"]]* | :heavy_minus_sign: | N/A | +| `created_at` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_minus_sign: | N/A | +| `completed_at` | 
[date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_minus_sign: | N/A | +| `agent_id` | *OptionalNullable[str]* | :heavy_minus_sign: | N/A | +| `model` | *OptionalNullable[str]* | :heavy_minus_sign: | N/A | +| `id` | *Optional[str]* | :heavy_minus_sign: | N/A | +| `tool_call_id` | *str* | :heavy_check_mark: | N/A | +| `name` | *str* | :heavy_check_mark: | N/A | +| `arguments` | [models.FunctionCallEntryArguments](../models/functioncallentryarguments.md) | :heavy_check_mark: | N/A | +| `confirmation_status` | [OptionalNullable[models.FunctionCallEntryConfirmationStatus]](../models/functioncallentryconfirmationstatus.md) | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/functioncallentryconfirmationstatus.md b/docs/models/functioncallentryconfirmationstatus.md new file mode 100644 index 00000000..8948beb6 --- /dev/null +++ b/docs/models/functioncallentryconfirmationstatus.md @@ -0,0 +1,10 @@ +# FunctionCallEntryConfirmationStatus + + +## Values + +| Name | Value | +| --------- | --------- | +| `PENDING` | pending | +| `ALLOWED` | allowed | +| `DENIED` | denied | \ No newline at end of file diff --git a/docs/models/functioncallentryobject.md b/docs/models/functioncallentryobject.md deleted file mode 100644 index 3cf2e427..00000000 --- a/docs/models/functioncallentryobject.md +++ /dev/null @@ -1,8 +0,0 @@ -# FunctionCallEntryObject - - -## Values - -| Name | Value | -| ------- | ------- | -| `ENTRY` | entry | \ No newline at end of file diff --git a/docs/models/functioncallentrytype.md b/docs/models/functioncallentrytype.md deleted file mode 100644 index 7ea34c52..00000000 --- a/docs/models/functioncallentrytype.md +++ /dev/null @@ -1,8 +0,0 @@ -# FunctionCallEntryType - - -## Values - -| Name | Value | -| --------------- | --------------- | -| `FUNCTION_CALL` | function.call | \ No newline at end of file diff --git a/docs/models/functioncallevent.md b/docs/models/functioncallevent.md index f4062060..0e3a36d6 
100644 --- a/docs/models/functioncallevent.md +++ b/docs/models/functioncallevent.md @@ -3,12 +3,15 @@ ## Fields -| Field | Type | Required | Description | -| -------------------------------------------------------------------- | -------------------------------------------------------------------- | -------------------------------------------------------------------- | -------------------------------------------------------------------- | -| `type` | *Literal["function.call.delta"]* | :heavy_check_mark: | N/A | -| `created_at` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_minus_sign: | N/A | -| `output_index` | *Optional[int]* | :heavy_minus_sign: | N/A | -| `id` | *str* | :heavy_check_mark: | N/A | -| `name` | *str* | :heavy_check_mark: | N/A | -| `tool_call_id` | *str* | :heavy_check_mark: | N/A | -| `arguments` | *str* | :heavy_check_mark: | N/A | \ No newline at end of file +| Field | Type | Required | Description | +| ---------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------- | +| `type` | *Literal["function.call.delta"]* | :heavy_check_mark: | N/A | +| `created_at` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_minus_sign: | N/A | +| `output_index` | *Optional[int]* | :heavy_minus_sign: | N/A | +| `id` | *str* | :heavy_check_mark: | N/A | +| `model` | *OptionalNullable[str]* | :heavy_minus_sign: | N/A | +| `agent_id` | *OptionalNullable[str]* | :heavy_minus_sign: | N/A | +| `name` | *str* | :heavy_check_mark: | N/A | +| `tool_call_id` | *str* | :heavy_check_mark: | N/A | +| `arguments` | *str* | 
:heavy_check_mark: | N/A | +| `confirmation_status` | [OptionalNullable[models.FunctionCallEventConfirmationStatus]](../models/functioncalleventconfirmationstatus.md) | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/functioncalleventconfirmationstatus.md b/docs/models/functioncalleventconfirmationstatus.md new file mode 100644 index 00000000..4a3c8774 --- /dev/null +++ b/docs/models/functioncalleventconfirmationstatus.md @@ -0,0 +1,10 @@ +# FunctionCallEventConfirmationStatus + + +## Values + +| Name | Value | +| --------- | --------- | +| `PENDING` | pending | +| `ALLOWED` | allowed | +| `DENIED` | denied | \ No newline at end of file diff --git a/docs/models/functionresultentry.md b/docs/models/functionresultentry.md index 6df54d3d..6a77abfd 100644 --- a/docs/models/functionresultentry.md +++ b/docs/models/functionresultentry.md @@ -3,12 +3,12 @@ ## Fields -| Field | Type | Required | Description | -| ------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------ | -| `object` | [Optional[models.FunctionResultEntryObject]](../models/functionresultentryobject.md) | :heavy_minus_sign: | N/A | -| `type` | [Optional[models.FunctionResultEntryType]](../models/functionresultentrytype.md) | :heavy_minus_sign: | N/A | -| `created_at` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_minus_sign: | N/A | -| `completed_at` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_minus_sign: | N/A | -| `id` | *Optional[str]* | :heavy_minus_sign: | N/A | -| `tool_call_id` | *str* | :heavy_check_mark: | N/A | -| `result` | *str* | :heavy_check_mark: | N/A | \ No newline at end of file +| Field | Type | Required | 
Description | +| -------------------------------------------------------------------- | -------------------------------------------------------------------- | -------------------------------------------------------------------- | -------------------------------------------------------------------- | +| `object` | *Optional[Literal["entry"]]* | :heavy_minus_sign: | N/A | +| `type` | *Optional[Literal["function.result"]]* | :heavy_minus_sign: | N/A | +| `created_at` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_minus_sign: | N/A | +| `completed_at` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_minus_sign: | N/A | +| `id` | *Optional[str]* | :heavy_minus_sign: | N/A | +| `tool_call_id` | *str* | :heavy_check_mark: | N/A | +| `result` | *str* | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/functionresultentryobject.md b/docs/models/functionresultentryobject.md deleted file mode 100644 index fe52e0a5..00000000 --- a/docs/models/functionresultentryobject.md +++ /dev/null @@ -1,8 +0,0 @@ -# FunctionResultEntryObject - - -## Values - -| Name | Value | -| ------- | ------- | -| `ENTRY` | entry | \ No newline at end of file diff --git a/docs/models/functionresultentrytype.md b/docs/models/functionresultentrytype.md deleted file mode 100644 index 35c94d8e..00000000 --- a/docs/models/functionresultentrytype.md +++ /dev/null @@ -1,8 +0,0 @@ -# FunctionResultEntryType - - -## Values - -| Name | Value | -| ----------------- | ----------------- | -| `FUNCTION_RESULT` | function.result | \ No newline at end of file diff --git a/docs/models/getdocumenttextcontentrequest.md b/docs/models/getdocumenttextcontentrequest.md deleted file mode 100644 index 85933401..00000000 --- a/docs/models/getdocumenttextcontentrequest.md +++ /dev/null @@ -1,9 +0,0 @@ -# GetDocumentTextContentRequest - - -## Fields - -| Field | Type | Required | Description | -| ------------------ | ------------------ | 
------------------ | ------------------ | -| `library_id` | *str* | :heavy_check_mark: | N/A | -| `document_id` | *str* | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/retrievefileout.md b/docs/models/getfileresponse.md similarity index 99% rename from docs/models/retrievefileout.md rename to docs/models/getfileresponse.md index 28f97dd2..0edd13e0 100644 --- a/docs/models/retrievefileout.md +++ b/docs/models/getfileresponse.md @@ -1,4 +1,4 @@ -# RetrieveFileOut +# GetFileResponse ## Fields diff --git a/docs/models/getfinetuningjobresponse.md b/docs/models/getfinetuningjobresponse.md deleted file mode 100644 index 1b0568dd..00000000 --- a/docs/models/getfinetuningjobresponse.md +++ /dev/null @@ -1,19 +0,0 @@ -# GetFineTuningJobResponse - -OK - - -## Supported Types - -### `models.ClassifierDetailedJobOut` - -```python -value: models.ClassifierDetailedJobOut = /* values here */ -``` - -### `models.CompletionDetailedJobOut` - -```python -value: models.CompletionDetailedJobOut = /* values here */ -``` - diff --git a/docs/models/filesignedurl.md b/docs/models/getsignedurlresponse.md similarity index 92% rename from docs/models/filesignedurl.md rename to docs/models/getsignedurlresponse.md index 52ce3f4f..bde69323 100644 --- a/docs/models/filesignedurl.md +++ b/docs/models/getsignedurlresponse.md @@ -1,4 +1,4 @@ -# FileSignedURL +# GetSignedURLResponse ## Fields diff --git a/docs/models/githubrepositoryout.md b/docs/models/githubrepository.md similarity index 97% rename from docs/models/githubrepositoryout.md rename to docs/models/githubrepository.md index fe38393a..827b6f34 100644 --- a/docs/models/githubrepositoryout.md +++ b/docs/models/githubrepository.md @@ -1,4 +1,4 @@ -# GithubRepositoryOut +# GithubRepository ## Fields diff --git a/docs/models/hyperparameters.md b/docs/models/hyperparameters.md index 46a6dd6b..b6c00c36 100644 --- a/docs/models/hyperparameters.md +++ b/docs/models/hyperparameters.md @@ -3,15 +3,15 @@ ## Supported 
Types -### `models.CompletionTrainingParametersIn` +### `models.CompletionTrainingParameters` ```python -value: models.CompletionTrainingParametersIn = /* values here */ +value: models.CompletionTrainingParameters = /* values here */ ``` -### `models.ClassifierTrainingParametersIn` +### `models.ClassifierTrainingParameters` ```python -value: models.ClassifierTrainingParametersIn = /* values here */ +value: models.ClassifierTrainingParameters = /* values here */ ``` diff --git a/docs/models/imagedetail.md b/docs/models/imagedetail.md new file mode 100644 index 00000000..1e5ba3fd --- /dev/null +++ b/docs/models/imagedetail.md @@ -0,0 +1,10 @@ +# ImageDetail + + +## Values + +| Name | Value | +| ------ | ------ | +| `LOW` | low | +| `AUTO` | auto | +| `HIGH` | high | \ No newline at end of file diff --git a/docs/models/imagegenerationtool.md b/docs/models/imagegenerationtool.md index 0c8de72c..b476b6f2 100644 --- a/docs/models/imagegenerationtool.md +++ b/docs/models/imagegenerationtool.md @@ -3,6 +3,7 @@ ## Fields -| Field | Type | Required | Description | -| ----------------------------- | ----------------------------- | ----------------------------- | ----------------------------- | -| `type` | *Literal["image_generation"]* | :heavy_check_mark: | N/A | \ No newline at end of file +| Field | Type | Required | Description | +| ---------------------------------------------------------------------------- | ---------------------------------------------------------------------------- | ---------------------------------------------------------------------------- | ---------------------------------------------------------------------------- | +| `tool_configuration` | [OptionalNullable[models.ToolConfiguration]](../models/toolconfiguration.md) | :heavy_minus_sign: | N/A | +| `type` | *Literal["image_generation"]* | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/imageurl.md b/docs/models/imageurl.md index 7c2bcbc3..6358e0ac 100644 --- 
a/docs/models/imageurl.md +++ b/docs/models/imageurl.md @@ -3,7 +3,7 @@ ## Fields -| Field | Type | Required | Description | -| ----------------------- | ----------------------- | ----------------------- | ----------------------- | -| `url` | *str* | :heavy_check_mark: | N/A | -| `detail` | *OptionalNullable[str]* | :heavy_minus_sign: | N/A | \ No newline at end of file +| Field | Type | Required | Description | +| ---------------------------------------------------------------- | ---------------------------------------------------------------- | ---------------------------------------------------------------- | ---------------------------------------------------------------- | +| `url` | *str* | :heavy_check_mark: | N/A | +| `detail` | [OptionalNullable[models.ImageDetail]](../models/imagedetail.md) | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/imageurlchunk.md b/docs/models/imageurlchunk.md index 43078c78..db0c53d2 100644 --- a/docs/models/imageurlchunk.md +++ b/docs/models/imageurlchunk.md @@ -5,7 +5,7 @@ ## Fields -| Field | Type | Required | Description | -| -------------------------------------------------------------------- | -------------------------------------------------------------------- | -------------------------------------------------------------------- | -------------------------------------------------------------------- | -| `image_url` | [models.ImageURLUnion](../models/imageurlunion.md) | :heavy_check_mark: | N/A | -| `type` | [Optional[models.ImageURLChunkType]](../models/imageurlchunktype.md) | :heavy_minus_sign: | N/A | \ No newline at end of file +| Field | Type | Required | Description | +| -------------------------------------------------- | -------------------------------------------------- | -------------------------------------------------- | -------------------------------------------------- | +| `type` | *Optional[Literal["image_url"]]* | :heavy_minus_sign: | N/A | +| `image_url` | 
[models.ImageURLUnion](../models/imageurlunion.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/imageurlchunktype.md b/docs/models/imageurlchunktype.md deleted file mode 100644 index 2064a0b4..00000000 --- a/docs/models/imageurlchunktype.md +++ /dev/null @@ -1,8 +0,0 @@ -# ImageURLChunkType - - -## Values - -| Name | Value | -| ----------- | ----------- | -| `IMAGE_URL` | image_url | \ No newline at end of file diff --git a/docs/models/inputs.md b/docs/models/inputs.md index 0f62a7ce..d5771207 100644 --- a/docs/models/inputs.md +++ b/docs/models/inputs.md @@ -5,10 +5,10 @@ Chat to classify ## Supported Types -### `models.InstructRequestInputs` +### `models.InstructRequest` ```python -value: models.InstructRequestInputs = /* values here */ +value: models.InstructRequest = /* values here */ ``` ### `List[models.InstructRequest]` diff --git a/docs/models/inputsmessage.md b/docs/models/inputsmessage.md deleted file mode 100644 index e3543fb4..00000000 --- a/docs/models/inputsmessage.md +++ /dev/null @@ -1,29 +0,0 @@ -# InputsMessage - - -## Supported Types - -### `models.AssistantMessage` - -```python -value: models.AssistantMessage = /* values here */ -``` - -### `models.SystemMessage` - -```python -value: models.SystemMessage = /* values here */ -``` - -### `models.ToolMessage` - -```python -value: models.ToolMessage = /* values here */ -``` - -### `models.UserMessage` - -```python -value: models.UserMessage = /* values here */ -``` - diff --git a/docs/models/instructrequestinputs.md b/docs/models/instructrequestinputs.md deleted file mode 100644 index 931ae5e4..00000000 --- a/docs/models/instructrequestinputs.md +++ /dev/null @@ -1,8 +0,0 @@ -# InstructRequestInputs - - -## Fields - -| Field | Type | Required | Description | -| -------------------------------------------------------- | -------------------------------------------------------- | -------------------------------------------------------- | 
-------------------------------------------------------- | -| `messages` | List[[models.InputsMessage](../models/inputsmessage.md)] | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/jobmetadataout.md b/docs/models/jobmetadata.md similarity index 98% rename from docs/models/jobmetadataout.md rename to docs/models/jobmetadata.md index 6218a161..5d8a89dd 100644 --- a/docs/models/jobmetadataout.md +++ b/docs/models/jobmetadata.md @@ -1,4 +1,4 @@ -# JobMetadataOut +# JobMetadata ## Fields diff --git a/docs/models/cancelbatchjobrequest.md b/docs/models/jobsapiroutesbatchcancelbatchjobrequest.md similarity index 86% rename from docs/models/cancelbatchjobrequest.md rename to docs/models/jobsapiroutesbatchcancelbatchjobrequest.md index f31f843b..c19d0241 100644 --- a/docs/models/cancelbatchjobrequest.md +++ b/docs/models/jobsapiroutesbatchcancelbatchjobrequest.md @@ -1,4 +1,4 @@ -# CancelBatchJobRequest +# JobsAPIRoutesBatchCancelBatchJobRequest ## Fields diff --git a/docs/models/getbatchjobrequest.md b/docs/models/jobsapiroutesbatchgetbatchjobrequest.md similarity index 92% rename from docs/models/getbatchjobrequest.md rename to docs/models/jobsapiroutesbatchgetbatchjobrequest.md index f3c67eb4..8c259bea 100644 --- a/docs/models/getbatchjobrequest.md +++ b/docs/models/jobsapiroutesbatchgetbatchjobrequest.md @@ -1,4 +1,4 @@ -# GetBatchJobRequest +# JobsAPIRoutesBatchGetBatchJobRequest ## Fields diff --git a/docs/models/listbatchjobsrequest.md b/docs/models/jobsapiroutesbatchgetbatchjobsrequest.md similarity index 98% rename from docs/models/listbatchjobsrequest.md rename to docs/models/jobsapiroutesbatchgetbatchjobsrequest.md index 19981b24..5ceb0b2c 100644 --- a/docs/models/listbatchjobsrequest.md +++ b/docs/models/jobsapiroutesbatchgetbatchjobsrequest.md @@ -1,4 +1,4 @@ -# ListBatchJobsRequest +# JobsAPIRoutesBatchGetBatchJobsRequest ## Fields diff --git a/docs/models/archivemodelrequest.md 
b/docs/models/jobsapiroutesfinetuningarchivefinetunedmodelrequest.md similarity index 93% rename from docs/models/archivemodelrequest.md rename to docs/models/jobsapiroutesfinetuningarchivefinetunedmodelrequest.md index 806d135e..f9700df5 100644 --- a/docs/models/archivemodelrequest.md +++ b/docs/models/jobsapiroutesfinetuningarchivefinetunedmodelrequest.md @@ -1,4 +1,4 @@ -# ArchiveModelRequest +# JobsAPIRoutesFineTuningArchiveFineTunedModelRequest ## Fields diff --git a/docs/models/cancelfinetuningjobrequest.md b/docs/models/jobsapiroutesfinetuningcancelfinetuningjobrequest.md similarity index 88% rename from docs/models/cancelfinetuningjobrequest.md rename to docs/models/jobsapiroutesfinetuningcancelfinetuningjobrequest.md index 6525788c..883cbac6 100644 --- a/docs/models/cancelfinetuningjobrequest.md +++ b/docs/models/jobsapiroutesfinetuningcancelfinetuningjobrequest.md @@ -1,4 +1,4 @@ -# CancelFineTuningJobRequest +# JobsAPIRoutesFineTuningCancelFineTuningJobRequest ## Fields diff --git a/docs/models/jobsapiroutesfinetuningcancelfinetuningjobresponse.md b/docs/models/jobsapiroutesfinetuningcancelfinetuningjobresponse.md new file mode 100644 index 00000000..fb62eb62 --- /dev/null +++ b/docs/models/jobsapiroutesfinetuningcancelfinetuningjobresponse.md @@ -0,0 +1,19 @@ +# JobsAPIRoutesFineTuningCancelFineTuningJobResponse + +OK + + +## Supported Types + +### `models.ClassifierFineTuningJobDetails` + +```python +value: models.ClassifierFineTuningJobDetails = /* values here */ +``` + +### `models.CompletionFineTuningJobDetails` + +```python +value: models.CompletionFineTuningJobDetails = /* values here */ +``` + diff --git a/docs/models/jobsapiroutesfinetuningcreatefinetuningjobresponse.md b/docs/models/jobsapiroutesfinetuningcreatefinetuningjobresponse.md new file mode 100644 index 00000000..7b52e2ca --- /dev/null +++ b/docs/models/jobsapiroutesfinetuningcreatefinetuningjobresponse.md @@ -0,0 +1,19 @@ +# JobsAPIRoutesFineTuningCreateFineTuningJobResponse + +OK + + 
+## Supported Types + +### `models.Response` + +```python +value: models.Response = /* values here */ +``` + +### `models.LegacyJobMetadata` + +```python +value: models.LegacyJobMetadata = /* values here */ +``` + diff --git a/docs/models/getfinetuningjobrequest.md b/docs/models/jobsapiroutesfinetuninggetfinetuningjobrequest.md similarity index 89% rename from docs/models/getfinetuningjobrequest.md rename to docs/models/jobsapiroutesfinetuninggetfinetuningjobrequest.md index f20cb214..fde19800 100644 --- a/docs/models/getfinetuningjobrequest.md +++ b/docs/models/jobsapiroutesfinetuninggetfinetuningjobrequest.md @@ -1,4 +1,4 @@ -# GetFineTuningJobRequest +# JobsAPIRoutesFineTuningGetFineTuningJobRequest ## Fields diff --git a/docs/models/jobsapiroutesfinetuninggetfinetuningjobresponse.md b/docs/models/jobsapiroutesfinetuninggetfinetuningjobresponse.md new file mode 100644 index 00000000..f7705327 --- /dev/null +++ b/docs/models/jobsapiroutesfinetuninggetfinetuningjobresponse.md @@ -0,0 +1,19 @@ +# JobsAPIRoutesFineTuningGetFineTuningJobResponse + +OK + + +## Supported Types + +### `models.ClassifierFineTuningJobDetails` + +```python +value: models.ClassifierFineTuningJobDetails = /* values here */ +``` + +### `models.CompletionFineTuningJobDetails` + +```python +value: models.CompletionFineTuningJobDetails = /* values here */ +``` + diff --git a/docs/models/jobsapiroutesfinetuninggetfinetuningjobsrequest.md b/docs/models/jobsapiroutesfinetuninggetfinetuningjobsrequest.md new file mode 100644 index 00000000..23c52c34 --- /dev/null +++ b/docs/models/jobsapiroutesfinetuninggetfinetuningjobsrequest.md @@ -0,0 +1,17 @@ +# JobsAPIRoutesFineTuningGetFineTuningJobsRequest + + +## Fields + +| Field | Type | Required | Description | +| -------------------------------------------------------------------------------------------------------------------------------------- | 
-------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------- | +| `page` | *Optional[int]* | :heavy_minus_sign: | The page number of the results to be returned. | +| `page_size` | *Optional[int]* | :heavy_minus_sign: | The number of items to return per page. | +| `model` | *OptionalNullable[str]* | :heavy_minus_sign: | The model name used for fine-tuning to filter on. When set, the other results are not displayed. | +| `created_after` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_minus_sign: | The date/time to filter on. When set, the results for previous creation times are not displayed. | +| `created_before` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_minus_sign: | N/A | +| `created_by_me` | *Optional[bool]* | :heavy_minus_sign: | When set, only return results for jobs created by the API caller. Other results are not displayed. | +| `status` | [OptionalNullable[models.JobsAPIRoutesFineTuningGetFineTuningJobsStatus]](../models/jobsapiroutesfinetuninggetfinetuningjobsstatus.md) | :heavy_minus_sign: | The current job state to filter on. When set, the other results are not displayed. | +| `wandb_project` | *OptionalNullable[str]* | :heavy_minus_sign: | The Weights and Biases project to filter on. When set, the other results are not displayed. | +| `wandb_name` | *OptionalNullable[str]* | :heavy_minus_sign: | The Weight and Biases run name to filter on. When set, the other results are not displayed. | +| `suffix` | *OptionalNullable[str]* | :heavy_minus_sign: | The model suffix to filter on. When set, the other results are not displayed. 
| \ No newline at end of file diff --git a/docs/models/listfinetuningjobsstatus.md b/docs/models/jobsapiroutesfinetuninggetfinetuningjobsstatus.md similarity index 94% rename from docs/models/listfinetuningjobsstatus.md rename to docs/models/jobsapiroutesfinetuninggetfinetuningjobsstatus.md index 07db9ae5..40d57686 100644 --- a/docs/models/listfinetuningjobsstatus.md +++ b/docs/models/jobsapiroutesfinetuninggetfinetuningjobsstatus.md @@ -1,4 +1,4 @@ -# ListFineTuningJobsStatus +# JobsAPIRoutesFineTuningGetFineTuningJobsStatus The current job state to filter on. When set, the other results are not displayed. diff --git a/docs/models/startfinetuningjobrequest.md b/docs/models/jobsapiroutesfinetuningstartfinetuningjobrequest.md similarity index 84% rename from docs/models/startfinetuningjobrequest.md rename to docs/models/jobsapiroutesfinetuningstartfinetuningjobrequest.md index 9df5aee8..4429fe48 100644 --- a/docs/models/startfinetuningjobrequest.md +++ b/docs/models/jobsapiroutesfinetuningstartfinetuningjobrequest.md @@ -1,4 +1,4 @@ -# StartFineTuningJobRequest +# JobsAPIRoutesFineTuningStartFineTuningJobRequest ## Fields diff --git a/docs/models/jobsapiroutesfinetuningstartfinetuningjobresponse.md b/docs/models/jobsapiroutesfinetuningstartfinetuningjobresponse.md new file mode 100644 index 00000000..1a7e71d4 --- /dev/null +++ b/docs/models/jobsapiroutesfinetuningstartfinetuningjobresponse.md @@ -0,0 +1,19 @@ +# JobsAPIRoutesFineTuningStartFineTuningJobResponse + +OK + + +## Supported Types + +### `models.ClassifierFineTuningJobDetails` + +```python +value: models.ClassifierFineTuningJobDetails = /* values here */ +``` + +### `models.CompletionFineTuningJobDetails` + +```python +value: models.CompletionFineTuningJobDetails = /* values here */ +``` + diff --git a/docs/models/unarchivemodelrequest.md b/docs/models/jobsapiroutesfinetuningunarchivefinetunedmodelrequest.md similarity index 92% rename from docs/models/unarchivemodelrequest.md rename to 
docs/models/jobsapiroutesfinetuningunarchivefinetunedmodelrequest.md index 033dad8a..95c1734d 100644 --- a/docs/models/unarchivemodelrequest.md +++ b/docs/models/jobsapiroutesfinetuningunarchivefinetunedmodelrequest.md @@ -1,4 +1,4 @@ -# UnarchiveModelRequest +# JobsAPIRoutesFineTuningUnarchiveFineTunedModelRequest ## Fields diff --git a/docs/models/jobsapiroutesfinetuningupdatefinetunedmodelrequest.md b/docs/models/jobsapiroutesfinetuningupdatefinetunedmodelrequest.md new file mode 100644 index 00000000..dbe49a86 --- /dev/null +++ b/docs/models/jobsapiroutesfinetuningupdatefinetunedmodelrequest.md @@ -0,0 +1,9 @@ +# JobsAPIRoutesFineTuningUpdateFineTunedModelRequest + + +## Fields + +| Field | Type | Required | Description | Example | +| ------------------------------------------------------------ | ------------------------------------------------------------ | ------------------------------------------------------------ | ------------------------------------------------------------ | ------------------------------------------------------------ | +| `model_id` | *str* | :heavy_check_mark: | The ID of the model to update. 
| ft:open-mistral-7b:587a6b29:20240514:7e773925 | +| `update_model_request` | [models.UpdateModelRequest](../models/updatemodelrequest.md) | :heavy_check_mark: | N/A | | \ No newline at end of file diff --git a/docs/models/jobsapiroutesfinetuningupdatefinetunedmodelresponse.md b/docs/models/jobsapiroutesfinetuningupdatefinetunedmodelresponse.md new file mode 100644 index 00000000..f40350bf --- /dev/null +++ b/docs/models/jobsapiroutesfinetuningupdatefinetunedmodelresponse.md @@ -0,0 +1,19 @@ +# JobsAPIRoutesFineTuningUpdateFineTunedModelResponse + +OK + + +## Supported Types + +### `models.ClassifierFineTunedModel` + +```python +value: models.ClassifierFineTunedModel = /* values here */ +``` + +### `models.CompletionFineTunedModel` + +```python +value: models.CompletionFineTunedModel = /* values here */ +``` + diff --git a/docs/models/jobsout.md b/docs/models/jobsout.md deleted file mode 100644 index 69f8342a..00000000 --- a/docs/models/jobsout.md +++ /dev/null @@ -1,10 +0,0 @@ -# JobsOut - - -## Fields - -| Field | Type | Required | Description | -| ---------------------------------------------------- | ---------------------------------------------------- | ---------------------------------------------------- | ---------------------------------------------------- | -| `data` | List[[models.JobsOutData](../models/jobsoutdata.md)] | :heavy_minus_sign: | N/A | -| `object` | *Optional[Literal["list"]]* | :heavy_minus_sign: | N/A | -| `total` | *int* | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/jobsoutdata.md b/docs/models/jobsoutdata.md deleted file mode 100644 index 28cec311..00000000 --- a/docs/models/jobsoutdata.md +++ /dev/null @@ -1,17 +0,0 @@ -# JobsOutData - - -## Supported Types - -### `models.ClassifierJobOut` - -```python -value: models.ClassifierJobOut = /* values here */ -``` - -### `models.CompletionJobOut` - -```python -value: models.CompletionJobOut = /* values here */ -``` - diff --git 
a/docs/models/legacyjobmetadataout.md b/docs/models/legacyjobmetadata.md similarity index 99% rename from docs/models/legacyjobmetadataout.md rename to docs/models/legacyjobmetadata.md index 8a712140..4705ab4f 100644 --- a/docs/models/legacyjobmetadataout.md +++ b/docs/models/legacyjobmetadata.md @@ -1,4 +1,4 @@ -# LegacyJobMetadataOut +# LegacyJobMetadata ## Fields diff --git a/docs/models/getlibraryrequest.md b/docs/models/librariesdeletev1request.md similarity index 90% rename from docs/models/getlibraryrequest.md rename to docs/models/librariesdeletev1request.md index 2a3acf50..68d7e543 100644 --- a/docs/models/getlibraryrequest.md +++ b/docs/models/librariesdeletev1request.md @@ -1,4 +1,4 @@ -# GetLibraryRequest +# LibrariesDeleteV1Request ## Fields diff --git a/docs/models/getdocumentstatusrequest.md b/docs/models/librariesdocumentsdeletev1request.md similarity index 90% rename from docs/models/getdocumentstatusrequest.md rename to docs/models/librariesdocumentsdeletev1request.md index 3557d773..efccdb1b 100644 --- a/docs/models/getdocumentstatusrequest.md +++ b/docs/models/librariesdocumentsdeletev1request.md @@ -1,4 +1,4 @@ -# GetDocumentStatusRequest +# LibrariesDocumentsDeleteV1Request ## Fields diff --git a/docs/models/librariesdocumentsgetextractedtextsignedurlv1request.md b/docs/models/librariesdocumentsgetextractedtextsignedurlv1request.md new file mode 100644 index 00000000..14ca66f7 --- /dev/null +++ b/docs/models/librariesdocumentsgetextractedtextsignedurlv1request.md @@ -0,0 +1,9 @@ +# LibrariesDocumentsGetExtractedTextSignedURLV1Request + + +## Fields + +| Field | Type | Required | Description | +| ------------------ | ------------------ | ------------------ | ------------------ | +| `library_id` | *str* | :heavy_check_mark: | N/A | +| `document_id` | *str* | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/librariesdocumentsgetsignedurlv1request.md b/docs/models/librariesdocumentsgetsignedurlv1request.md new file 
mode 100644 index 00000000..7c08c180 --- /dev/null +++ b/docs/models/librariesdocumentsgetsignedurlv1request.md @@ -0,0 +1,9 @@ +# LibrariesDocumentsGetSignedURLV1Request + + +## Fields + +| Field | Type | Required | Description | +| ------------------ | ------------------ | ------------------ | ------------------ | +| `library_id` | *str* | :heavy_check_mark: | N/A | +| `document_id` | *str* | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/getdocumentrequest.md b/docs/models/librariesdocumentsgetstatusv1request.md similarity index 90% rename from docs/models/getdocumentrequest.md rename to docs/models/librariesdocumentsgetstatusv1request.md index 29f62127..e6d41875 100644 --- a/docs/models/getdocumentrequest.md +++ b/docs/models/librariesdocumentsgetstatusv1request.md @@ -1,4 +1,4 @@ -# GetDocumentRequest +# LibrariesDocumentsGetStatusV1Request ## Fields diff --git a/docs/models/getdocumentextractedtextsignedurlrequest.md b/docs/models/librariesdocumentsgettextcontentv1request.md similarity index 89% rename from docs/models/getdocumentextractedtextsignedurlrequest.md rename to docs/models/librariesdocumentsgettextcontentv1request.md index ff703802..2f58a446 100644 --- a/docs/models/getdocumentextractedtextsignedurlrequest.md +++ b/docs/models/librariesdocumentsgettextcontentv1request.md @@ -1,4 +1,4 @@ -# GetDocumentExtractedTextSignedURLRequest +# LibrariesDocumentsGetTextContentV1Request ## Fields diff --git a/docs/models/getdocumentsignedurlrequest.md b/docs/models/librariesdocumentsgetv1request.md similarity index 91% rename from docs/models/getdocumentsignedurlrequest.md rename to docs/models/librariesdocumentsgetv1request.md index 72a179c0..6febc058 100644 --- a/docs/models/getdocumentsignedurlrequest.md +++ b/docs/models/librariesdocumentsgetv1request.md @@ -1,4 +1,4 @@ -# GetDocumentSignedURLRequest +# LibrariesDocumentsGetV1Request ## Fields diff --git a/docs/models/listdocumentsrequest.md 
b/docs/models/librariesdocumentslistv1request.md similarity index 96% rename from docs/models/listdocumentsrequest.md rename to docs/models/librariesdocumentslistv1request.md index 369e8edb..44f63001 100644 --- a/docs/models/listdocumentsrequest.md +++ b/docs/models/librariesdocumentslistv1request.md @@ -1,4 +1,4 @@ -# ListDocumentsRequest +# LibrariesDocumentsListV1Request ## Fields diff --git a/docs/models/deletedocumentrequest.md b/docs/models/librariesdocumentsreprocessv1request.md similarity index 90% rename from docs/models/deletedocumentrequest.md rename to docs/models/librariesdocumentsreprocessv1request.md index eb060099..196ba17b 100644 --- a/docs/models/deletedocumentrequest.md +++ b/docs/models/librariesdocumentsreprocessv1request.md @@ -1,4 +1,4 @@ -# DeleteDocumentRequest +# LibrariesDocumentsReprocessV1Request ## Fields diff --git a/docs/models/librariesdocumentsupdatev1request.md b/docs/models/librariesdocumentsupdatev1request.md new file mode 100644 index 00000000..d4630850 --- /dev/null +++ b/docs/models/librariesdocumentsupdatev1request.md @@ -0,0 +1,10 @@ +# LibrariesDocumentsUpdateV1Request + + +## Fields + +| Field | Type | Required | Description | +| ------------------------------------------------------------------ | ------------------------------------------------------------------ | ------------------------------------------------------------------ | ------------------------------------------------------------------ | +| `library_id` | *str* | :heavy_check_mark: | N/A | +| `document_id` | *str* | :heavy_check_mark: | N/A | +| `update_document_request` | [models.UpdateDocumentRequest](../models/updatedocumentrequest.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/uploaddocumentrequest.md b/docs/models/librariesdocumentsuploadv1request.md similarity index 96% rename from docs/models/uploaddocumentrequest.md rename to docs/models/librariesdocumentsuploadv1request.md index 92152b7f..172a6183 100644 --- 
a/docs/models/uploaddocumentrequest.md +++ b/docs/models/librariesdocumentsuploadv1request.md @@ -1,4 +1,4 @@ -# UploadDocumentRequest +# LibrariesDocumentsUploadV1Request ## Fields diff --git a/docs/models/deletelibraryrequest.md b/docs/models/librariesgetv1request.md similarity index 91% rename from docs/models/deletelibraryrequest.md rename to docs/models/librariesgetv1request.md index c229ad73..6e1e04c3 100644 --- a/docs/models/deletelibraryrequest.md +++ b/docs/models/librariesgetv1request.md @@ -1,4 +1,4 @@ -# DeleteLibraryRequest +# LibrariesGetV1Request ## Fields diff --git a/docs/models/updateorcreatelibraryaccessrequest.md b/docs/models/librariessharecreatev1request.md similarity index 95% rename from docs/models/updateorcreatelibraryaccessrequest.md rename to docs/models/librariessharecreatev1request.md index e04567b4..4c05241d 100644 --- a/docs/models/updateorcreatelibraryaccessrequest.md +++ b/docs/models/librariessharecreatev1request.md @@ -1,4 +1,4 @@ -# UpdateOrCreateLibraryAccessRequest +# LibrariesShareCreateV1Request ## Fields diff --git a/docs/models/deletelibraryaccessrequest.md b/docs/models/librariessharedeletev1request.md similarity index 96% rename from docs/models/deletelibraryaccessrequest.md rename to docs/models/librariessharedeletev1request.md index c7034b98..850e22ab 100644 --- a/docs/models/deletelibraryaccessrequest.md +++ b/docs/models/librariessharedeletev1request.md @@ -1,4 +1,4 @@ -# DeleteLibraryAccessRequest +# LibrariesShareDeleteV1Request ## Fields diff --git a/docs/models/listlibraryaccessesrequest.md b/docs/models/librariessharelistv1request.md similarity index 90% rename from docs/models/listlibraryaccessesrequest.md rename to docs/models/librariessharelistv1request.md index d98bcda2..98bf6d17 100644 --- a/docs/models/listlibraryaccessesrequest.md +++ b/docs/models/librariessharelistv1request.md @@ -1,4 +1,4 @@ -# ListLibraryAccessesRequest +# LibrariesShareListV1Request ## Fields diff --git 
a/docs/models/librariesupdatev1request.md b/docs/models/librariesupdatev1request.md new file mode 100644 index 00000000..c5c142db --- /dev/null +++ b/docs/models/librariesupdatev1request.md @@ -0,0 +1,9 @@ +# LibrariesUpdateV1Request + + +## Fields + +| Field | Type | Required | Description | +| ---------------------------------------------------------------- | ---------------------------------------------------------------- | ---------------------------------------------------------------- | ---------------------------------------------------------------- | +| `library_id` | *str* | :heavy_check_mark: | N/A | +| `update_library_request` | [models.UpdateLibraryRequest](../models/updatelibraryrequest.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/libraryout.md b/docs/models/library.md similarity index 99% rename from docs/models/libraryout.md rename to docs/models/library.md index ebf46d57..4319f43d 100644 --- a/docs/models/libraryout.md +++ b/docs/models/library.md @@ -1,4 +1,4 @@ -# LibraryOut +# Library ## Fields diff --git a/docs/models/libraryinupdate.md b/docs/models/libraryinupdate.md deleted file mode 100644 index 4aa169c7..00000000 --- a/docs/models/libraryinupdate.md +++ /dev/null @@ -1,9 +0,0 @@ -# LibraryInUpdate - - -## Fields - -| Field | Type | Required | Description | -| ----------------------- | ----------------------- | ----------------------- | ----------------------- | -| `name` | *OptionalNullable[str]* | :heavy_minus_sign: | N/A | -| `description` | *OptionalNullable[str]* | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/listbatchjobsresponse.md b/docs/models/listbatchjobsresponse.md new file mode 100644 index 00000000..c23e3220 --- /dev/null +++ b/docs/models/listbatchjobsresponse.md @@ -0,0 +1,10 @@ +# ListBatchJobsResponse + + +## Fields + +| Field | Type | Required | Description | +| ---------------------------------------------- | 
---------------------------------------------- | ---------------------------------------------- | ---------------------------------------------- | +| `data` | List[[models.BatchJob](../models/batchjob.md)] | :heavy_minus_sign: | N/A | +| `object` | *Optional[Literal["list"]]* | :heavy_minus_sign: | N/A | +| `total` | *int* | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/listdocumentout.md b/docs/models/listdocumentsresponse.md similarity index 90% rename from docs/models/listdocumentout.md rename to docs/models/listdocumentsresponse.md index f14157b8..47b9d3b7 100644 --- a/docs/models/listdocumentout.md +++ b/docs/models/listdocumentsresponse.md @@ -1,4 +1,4 @@ -# ListDocumentOut +# ListDocumentsResponse ## Fields @@ -6,4 +6,4 @@ | Field | Type | Required | Description | | ---------------------------------------------------- | ---------------------------------------------------- | ---------------------------------------------------- | ---------------------------------------------------- | | `pagination` | [models.PaginationInfo](../models/paginationinfo.md) | :heavy_check_mark: | N/A | -| `data` | List[[models.DocumentOut](../models/documentout.md)] | :heavy_check_mark: | N/A | \ No newline at end of file +| `data` | List[[models.Document](../models/document.md)] | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/listfilesout.md b/docs/models/listfilesresponse.md similarity index 98% rename from docs/models/listfilesout.md rename to docs/models/listfilesresponse.md index bcb1f13a..802f685f 100644 --- a/docs/models/listfilesout.md +++ b/docs/models/listfilesresponse.md @@ -1,4 +1,4 @@ -# ListFilesOut +# ListFilesResponse ## Fields diff --git a/docs/models/listfinetuningjobsrequest.md b/docs/models/listfinetuningjobsrequest.md deleted file mode 100644 index 3a04fc70..00000000 --- a/docs/models/listfinetuningjobsrequest.md +++ /dev/null @@ -1,17 +0,0 @@ -# ListFineTuningJobsRequest - - -## Fields - -| 
Field | Type | Required | Description | -| -------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------- | -| `page` | *Optional[int]* | :heavy_minus_sign: | The page number of the results to be returned. | -| `page_size` | *Optional[int]* | :heavy_minus_sign: | The number of items to return per page. | -| `model` | *OptionalNullable[str]* | :heavy_minus_sign: | The model name used for fine-tuning to filter on. When set, the other results are not displayed. | -| `created_after` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_minus_sign: | The date/time to filter on. When set, the results for previous creation times are not displayed. | -| `created_before` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_minus_sign: | N/A | -| `created_by_me` | *Optional[bool]* | :heavy_minus_sign: | When set, only return results for jobs created by the API caller. Other results are not displayed. | -| `status` | [OptionalNullable[models.ListFineTuningJobsStatus]](../models/listfinetuningjobsstatus.md) | :heavy_minus_sign: | The current job state to filter on. When set, the other results are not displayed. | -| `wandb_project` | *OptionalNullable[str]* | :heavy_minus_sign: | The Weights and Biases project to filter on. When set, the other results are not displayed. | -| `wandb_name` | *OptionalNullable[str]* | :heavy_minus_sign: | The Weight and Biases run name to filter on. When set, the other results are not displayed. | -| `suffix` | *OptionalNullable[str]* | :heavy_minus_sign: | The model suffix to filter on. When set, the other results are not displayed. 
| \ No newline at end of file diff --git a/docs/models/listfinetuningjobsresponse.md b/docs/models/listfinetuningjobsresponse.md new file mode 100644 index 00000000..00251242 --- /dev/null +++ b/docs/models/listfinetuningjobsresponse.md @@ -0,0 +1,10 @@ +# ListFineTuningJobsResponse + + +## Fields + +| Field | Type | Required | Description | +| ------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------ | +| `data` | List[[models.ListFineTuningJobsResponseData](../models/listfinetuningjobsresponsedata.md)] | :heavy_minus_sign: | N/A | +| `object` | *Optional[Literal["list"]]* | :heavy_minus_sign: | N/A | +| `total` | *int* | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/listfinetuningjobsresponsedata.md b/docs/models/listfinetuningjobsresponsedata.md new file mode 100644 index 00000000..adb06444 --- /dev/null +++ b/docs/models/listfinetuningjobsresponsedata.md @@ -0,0 +1,17 @@ +# ListFineTuningJobsResponseData + + +## Supported Types + +### `models.ClassifierFineTuningJob` + +```python +value: models.ClassifierFineTuningJob = /* values here */ +``` + +### `models.CompletionFineTuningJob` + +```python +value: models.CompletionFineTuningJob = /* values here */ +``` + diff --git a/docs/models/listlibrariesresponse.md b/docs/models/listlibrariesresponse.md new file mode 100644 index 00000000..e21b9ced --- /dev/null +++ b/docs/models/listlibrariesresponse.md @@ -0,0 +1,8 @@ +# ListLibrariesResponse + + +## Fields + +| Field | Type | Required | Description | +| -------------------------------------------- | -------------------------------------------- | -------------------------------------------- | 
-------------------------------------------- | +| `data` | List[[models.Library](../models/library.md)] | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/listlibraryout.md b/docs/models/listlibraryout.md deleted file mode 100644 index db76ffa1..00000000 --- a/docs/models/listlibraryout.md +++ /dev/null @@ -1,8 +0,0 @@ -# ListLibraryOut - - -## Fields - -| Field | Type | Required | Description | -| -------------------------------------------------- | -------------------------------------------------- | -------------------------------------------------- | -------------------------------------------------- | -| `data` | List[[models.LibraryOut](../models/libraryout.md)] | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/messageinputcontentchunks.md b/docs/models/messageinputcontentchunks.md index 4fd18a0d..05617850 100644 --- a/docs/models/messageinputcontentchunks.md +++ b/docs/models/messageinputcontentchunks.md @@ -27,9 +27,9 @@ value: models.ToolFileChunk = /* values here */ value: models.DocumentURLChunk = /* values here */ ``` -### `models.ThinkChunk` +### `models.ConversationThinkChunk` ```python -value: models.ThinkChunk = /* values here */ +value: models.ConversationThinkChunk = /* values here */ ``` diff --git a/docs/models/messageinputentry.md b/docs/models/messageinputentry.md index 52183a32..f8514fb3 100644 --- a/docs/models/messageinputentry.md +++ b/docs/models/messageinputentry.md @@ -5,13 +5,13 @@ Representation of an input message inside the conversation. 
## Fields -| Field | Type | Required | Description | -| -------------------------------------------------------------------------------- | -------------------------------------------------------------------------------- | -------------------------------------------------------------------------------- | -------------------------------------------------------------------------------- | -| `object` | [Optional[models.MessageInputEntryObject]](../models/messageinputentryobject.md) | :heavy_minus_sign: | N/A | -| `type` | [Optional[models.MessageInputEntryType]](../models/messageinputentrytype.md) | :heavy_minus_sign: | N/A | -| `created_at` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_minus_sign: | N/A | -| `completed_at` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_minus_sign: | N/A | -| `id` | *Optional[str]* | :heavy_minus_sign: | N/A | -| `role` | [models.MessageInputEntryRole](../models/messageinputentryrole.md) | :heavy_check_mark: | N/A | -| `content` | [models.MessageInputEntryContent](../models/messageinputentrycontent.md) | :heavy_check_mark: | N/A | -| `prefix` | *Optional[bool]* | :heavy_minus_sign: | N/A | \ No newline at end of file +| Field | Type | Required | Description | +| ------------------------------------------------------------------------ | ------------------------------------------------------------------------ | ------------------------------------------------------------------------ | ------------------------------------------------------------------------ | +| `object` | *Optional[Literal["entry"]]* | :heavy_minus_sign: | N/A | +| `type` | *Optional[Literal["message.input"]]* | :heavy_minus_sign: | N/A | +| `created_at` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_minus_sign: | N/A | +| `completed_at` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_minus_sign: | N/A | +| `id` | *Optional[str]* | 
:heavy_minus_sign: | N/A | +| `role` | [models.Role](../models/role.md) | :heavy_check_mark: | N/A | +| `content` | [models.MessageInputEntryContent](../models/messageinputentrycontent.md) | :heavy_check_mark: | N/A | +| `prefix` | *Optional[bool]* | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/messageinputentryobject.md b/docs/models/messageinputentryobject.md deleted file mode 100644 index 6bdd62e2..00000000 --- a/docs/models/messageinputentryobject.md +++ /dev/null @@ -1,8 +0,0 @@ -# MessageInputEntryObject - - -## Values - -| Name | Value | -| ------- | ------- | -| `ENTRY` | entry | \ No newline at end of file diff --git a/docs/models/messageinputentrytype.md b/docs/models/messageinputentrytype.md deleted file mode 100644 index d3378124..00000000 --- a/docs/models/messageinputentrytype.md +++ /dev/null @@ -1,8 +0,0 @@ -# MessageInputEntryType - - -## Values - -| Name | Value | -| --------------- | --------------- | -| `MESSAGE_INPUT` | message.input | \ No newline at end of file diff --git a/docs/models/messageoutputcontentchunks.md b/docs/models/messageoutputcontentchunks.md index d9c3d50e..c4a7777e 100644 --- a/docs/models/messageoutputcontentchunks.md +++ b/docs/models/messageoutputcontentchunks.md @@ -27,10 +27,10 @@ value: models.ToolFileChunk = /* values here */ value: models.DocumentURLChunk = /* values here */ ``` -### `models.ThinkChunk` +### `models.ConversationThinkChunk` ```python -value: models.ThinkChunk = /* values here */ +value: models.ConversationThinkChunk = /* values here */ ``` ### `models.ToolReferenceChunk` diff --git a/docs/models/messageoutputentry.md b/docs/models/messageoutputentry.md index 5b42e20d..73a1c666 100644 --- a/docs/models/messageoutputentry.md +++ b/docs/models/messageoutputentry.md @@ -3,14 +3,14 @@ ## Fields -| Field | Type | Required | Description | -| ---------------------------------------------------------------------------------- | 
---------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------- | -| `object` | [Optional[models.MessageOutputEntryObject]](../models/messageoutputentryobject.md) | :heavy_minus_sign: | N/A | -| `type` | [Optional[models.MessageOutputEntryType]](../models/messageoutputentrytype.md) | :heavy_minus_sign: | N/A | -| `created_at` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_minus_sign: | N/A | -| `completed_at` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_minus_sign: | N/A | -| `id` | *Optional[str]* | :heavy_minus_sign: | N/A | -| `agent_id` | *OptionalNullable[str]* | :heavy_minus_sign: | N/A | -| `model` | *OptionalNullable[str]* | :heavy_minus_sign: | N/A | -| `role` | [Optional[models.MessageOutputEntryRole]](../models/messageoutputentryrole.md) | :heavy_minus_sign: | N/A | -| `content` | [models.MessageOutputEntryContent](../models/messageoutputentrycontent.md) | :heavy_check_mark: | N/A | \ No newline at end of file +| Field | Type | Required | Description | +| -------------------------------------------------------------------------- | -------------------------------------------------------------------------- | -------------------------------------------------------------------------- | -------------------------------------------------------------------------- | +| `object` | *Optional[Literal["entry"]]* | :heavy_minus_sign: | N/A | +| `type` | *Optional[Literal["message.output"]]* | :heavy_minus_sign: | N/A | +| `created_at` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_minus_sign: | N/A | +| `completed_at` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_minus_sign: | N/A | +| `agent_id` | *OptionalNullable[str]* | :heavy_minus_sign: | N/A | 
+| `model` | *OptionalNullable[str]* | :heavy_minus_sign: | N/A | +| `id` | *Optional[str]* | :heavy_minus_sign: | N/A | +| `role` | *Optional[Literal["assistant"]]* | :heavy_minus_sign: | N/A | +| `content` | [models.MessageOutputEntryContent](../models/messageoutputentrycontent.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/messageoutputentryobject.md b/docs/models/messageoutputentryobject.md deleted file mode 100644 index bb254c82..00000000 --- a/docs/models/messageoutputentryobject.md +++ /dev/null @@ -1,8 +0,0 @@ -# MessageOutputEntryObject - - -## Values - -| Name | Value | -| ------- | ------- | -| `ENTRY` | entry | \ No newline at end of file diff --git a/docs/models/messageoutputentryrole.md b/docs/models/messageoutputentryrole.md deleted file mode 100644 index 783ee0aa..00000000 --- a/docs/models/messageoutputentryrole.md +++ /dev/null @@ -1,8 +0,0 @@ -# MessageOutputEntryRole - - -## Values - -| Name | Value | -| ----------- | ----------- | -| `ASSISTANT` | assistant | \ No newline at end of file diff --git a/docs/models/messageoutputentrytype.md b/docs/models/messageoutputentrytype.md deleted file mode 100644 index cb4a7a1b..00000000 --- a/docs/models/messageoutputentrytype.md +++ /dev/null @@ -1,8 +0,0 @@ -# MessageOutputEntryType - - -## Values - -| Name | Value | -| ---------------- | ---------------- | -| `MESSAGE_OUTPUT` | message.output | \ No newline at end of file diff --git a/docs/models/messageoutputevent.md b/docs/models/messageoutputevent.md index b0fa1a2d..e09a965f 100644 --- a/docs/models/messageoutputevent.md +++ b/docs/models/messageoutputevent.md @@ -3,14 +3,14 @@ ## Fields -| Field | Type | Required | Description | -| ------------------------------------------------------------------------------ | ------------------------------------------------------------------------------ | ------------------------------------------------------------------------------ | 
------------------------------------------------------------------------------ | -| `type` | *Literal["message.output.delta"]* | :heavy_check_mark: | N/A | -| `created_at` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_minus_sign: | N/A | -| `output_index` | *Optional[int]* | :heavy_minus_sign: | N/A | -| `id` | *str* | :heavy_check_mark: | N/A | -| `content_index` | *Optional[int]* | :heavy_minus_sign: | N/A | -| `model` | *OptionalNullable[str]* | :heavy_minus_sign: | N/A | -| `agent_id` | *OptionalNullable[str]* | :heavy_minus_sign: | N/A | -| `role` | [Optional[models.MessageOutputEventRole]](../models/messageoutputeventrole.md) | :heavy_minus_sign: | N/A | -| `content` | [models.MessageOutputEventContent](../models/messageoutputeventcontent.md) | :heavy_check_mark: | N/A | \ No newline at end of file +| Field | Type | Required | Description | +| -------------------------------------------------------------------------- | -------------------------------------------------------------------------- | -------------------------------------------------------------------------- | -------------------------------------------------------------------------- | +| `type` | *Literal["message.output.delta"]* | :heavy_check_mark: | N/A | +| `created_at` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_minus_sign: | N/A | +| `output_index` | *Optional[int]* | :heavy_minus_sign: | N/A | +| `id` | *str* | :heavy_check_mark: | N/A | +| `content_index` | *Optional[int]* | :heavy_minus_sign: | N/A | +| `model` | *OptionalNullable[str]* | :heavy_minus_sign: | N/A | +| `agent_id` | *OptionalNullable[str]* | :heavy_minus_sign: | N/A | +| `role` | *Optional[Literal["assistant"]]* | :heavy_minus_sign: | N/A | +| `content` | [models.MessageOutputEventContent](../models/messageoutputeventcontent.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/messageoutputeventrole.md 
b/docs/models/messageoutputeventrole.md deleted file mode 100644 index e38c6472..00000000 --- a/docs/models/messageoutputeventrole.md +++ /dev/null @@ -1,8 +0,0 @@ -# MessageOutputEventRole - - -## Values - -| Name | Value | -| ----------- | ----------- | -| `ASSISTANT` | assistant | \ No newline at end of file diff --git a/docs/models/metricout.md b/docs/models/metric.md similarity index 98% rename from docs/models/metricout.md rename to docs/models/metric.md index 3c552bac..7f863036 100644 --- a/docs/models/metricout.md +++ b/docs/models/metric.md @@ -1,4 +1,4 @@ -# MetricOut +# Metric Metrics at the step number during the fine-tuning job. Use these metrics to assess if the training is going smoothly (loss should decrease, token accuracy should increase). diff --git a/docs/models/modelconversation.md b/docs/models/modelconversation.md index 813e1f3a..af2e5c61 100644 --- a/docs/models/modelconversation.md +++ b/docs/models/modelconversation.md @@ -3,16 +3,16 @@ ## Fields -| Field | Type | Required | Description | -| -------------------------------------------------------------------------------- | -------------------------------------------------------------------------------- | -------------------------------------------------------------------------------- | -------------------------------------------------------------------------------- | -| `instructions` | *OptionalNullable[str]* | :heavy_minus_sign: | Instruction prompt the model will follow during the conversation. | -| `tools` | List[[models.ModelConversationTool](../models/modelconversationtool.md)] | :heavy_minus_sign: | List of tools which are available to the model during the conversation. | -| `completion_args` | [Optional[models.CompletionArgs]](../models/completionargs.md) | :heavy_minus_sign: | White-listed arguments from the completion API | -| `name` | *OptionalNullable[str]* | :heavy_minus_sign: | Name given to the conversation. 
| -| `description` | *OptionalNullable[str]* | :heavy_minus_sign: | Description of the what the conversation is about. | -| `metadata` | Dict[str, *Any*] | :heavy_minus_sign: | Custom metadata for the conversation. | -| `object` | [Optional[models.ModelConversationObject]](../models/modelconversationobject.md) | :heavy_minus_sign: | N/A | -| `id` | *str* | :heavy_check_mark: | N/A | -| `created_at` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_check_mark: | N/A | -| `updated_at` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_check_mark: | N/A | -| `model` | *str* | :heavy_check_mark: | N/A | \ No newline at end of file +| Field | Type | Required | Description | +| ------------------------------------------------------------------------ | ------------------------------------------------------------------------ | ------------------------------------------------------------------------ | ------------------------------------------------------------------------ | +| `instructions` | *OptionalNullable[str]* | :heavy_minus_sign: | Instruction prompt the model will follow during the conversation. | +| `tools` | List[[models.ModelConversationTool](../models/modelconversationtool.md)] | :heavy_minus_sign: | List of tools which are available to the model during the conversation. | +| `completion_args` | [Optional[models.CompletionArgs]](../models/completionargs.md) | :heavy_minus_sign: | White-listed arguments from the completion API | +| `name` | *OptionalNullable[str]* | :heavy_minus_sign: | Name given to the conversation. | +| `description` | *OptionalNullable[str]* | :heavy_minus_sign: | Description of the what the conversation is about. | +| `metadata` | Dict[str, *Any*] | :heavy_minus_sign: | Custom metadata for the conversation. 
| +| `object` | *Optional[Literal["conversation"]]* | :heavy_minus_sign: | N/A | +| `id` | *str* | :heavy_check_mark: | N/A | +| `created_at` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_check_mark: | N/A | +| `updated_at` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_check_mark: | N/A | +| `model` | *str* | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/modelconversationobject.md b/docs/models/modelconversationobject.md deleted file mode 100644 index ead1fa26..00000000 --- a/docs/models/modelconversationobject.md +++ /dev/null @@ -1,8 +0,0 @@ -# ModelConversationObject - - -## Values - -| Name | Value | -| -------------- | -------------- | -| `CONVERSATION` | conversation | \ No newline at end of file diff --git a/docs/models/ocrrequest.md b/docs/models/ocrrequest.md index 87929e53..dd3fc2ea 100644 --- a/docs/models/ocrrequest.md +++ b/docs/models/ocrrequest.md @@ -3,18 +3,18 @@ ## Fields -| Field | Type | Required | Description | Example | -| ---------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------- | -| `model` | *Nullable[str]* | :heavy_check_mark: | N/A | | -| `id` | *Optional[str]* | :heavy_minus_sign: | N/A | | -| `document` | 
[models.Document](../models/document.md) | :heavy_check_mark: | Document to run OCR on | | -| `pages` | List[*int*] | :heavy_minus_sign: | Specific pages user wants to process in various formats: single number, range, or list of both. Starts from 0 | | -| `include_image_base64` | *OptionalNullable[bool]* | :heavy_minus_sign: | Include image URLs in response | | -| `image_limit` | *OptionalNullable[int]* | :heavy_minus_sign: | Max images to extract | | -| `image_min_size` | *OptionalNullable[int]* | :heavy_minus_sign: | Minimum height and width of image to extract | | -| `bbox_annotation_format` | [OptionalNullable[models.ResponseFormat]](../models/responseformat.md) | :heavy_minus_sign: | Structured output class for extracting useful information from each extracted bounding box / image from document. Only json_schema is valid for this field | {
"type": "text"
} | -| `document_annotation_format` | [OptionalNullable[models.ResponseFormat]](../models/responseformat.md) | :heavy_minus_sign: | Structured output class for extracting useful information from the entire document. Only json_schema is valid for this field | {
"type": "text"
} | -| `document_annotation_prompt` | *OptionalNullable[str]* | :heavy_minus_sign: | Optional prompt to guide the model in extracting structured output from the entire document. A document_annotation_format must be provided. | | -| `table_format` | [OptionalNullable[models.TableFormat]](../models/tableformat.md) | :heavy_minus_sign: | N/A | | -| `extract_header` | *Optional[bool]* | :heavy_minus_sign: | N/A | | -| `extract_footer` | *Optional[bool]* | :heavy_minus_sign: | N/A | | \ No newline at end of file +| Field | Type | Required | Description | Example | +| ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | 
----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `model` | *Nullable[str]* | :heavy_check_mark: | N/A | | +| `id` | *Optional[str]* | :heavy_minus_sign: | N/A | | +| `document` | [models.DocumentUnion](../models/documentunion.md) | :heavy_check_mark: | Document to run OCR on | | +| `pages` | List[*int*] | :heavy_minus_sign: | Specific pages user wants to process in various formats: single number, range, or list of both. Starts from 0 | | +| `include_image_base64` | *OptionalNullable[bool]* | :heavy_minus_sign: | Include image URLs in response | | +| `image_limit` | *OptionalNullable[int]* | :heavy_minus_sign: | Max images to extract | | +| `image_min_size` | *OptionalNullable[int]* | :heavy_minus_sign: | Minimum height and width of image to extract | | +| `bbox_annotation_format` | [OptionalNullable[models.ResponseFormat]](../models/responseformat.md) | :heavy_minus_sign: | Structured output class for extracting useful information from each extracted bounding box / image from document. Only json_schema is valid for this field | **Example 1:** {
"type": "text"
}
**Example 2:** {
"type": "json_object"
}
**Example 3:** {
"type": "json_schema",
"json_schema": {
"schema": {
"properties": {
"name": {
"title": "Name",
"type": "string"
},
"authors": {
"items": {
"type": "string"
},
"title": "Authors",
"type": "array"
}
},
"required": [
"name",
"authors"
],
"title": "Book",
"type": "object",
"additionalProperties": false
},
"name": "book",
"strict": true
}
} | +| `document_annotation_format` | [OptionalNullable[models.ResponseFormat]](../models/responseformat.md) | :heavy_minus_sign: | Structured output class for extracting useful information from the entire document. Only json_schema is valid for this field | **Example 1:** {
"type": "text"
}
**Example 2:** {
"type": "json_object"
}
**Example 3:** {
"type": "json_schema",
"json_schema": {
"schema": {
"properties": {
"name": {
"title": "Name",
"type": "string"
},
"authors": {
"items": {
"type": "string"
},
"title": "Authors",
"type": "array"
}
},
"required": [
"name",
"authors"
],
"title": "Book",
"type": "object",
"additionalProperties": false
},
"name": "book",
"strict": true
}
} | +| `document_annotation_prompt` | *OptionalNullable[str]* | :heavy_minus_sign: | Optional prompt to guide the model in extracting structured output from the entire document. A document_annotation_format must be provided. | | +| `table_format` | [OptionalNullable[models.TableFormat]](../models/tableformat.md) | :heavy_minus_sign: | N/A | | +| `extract_header` | *Optional[bool]* | :heavy_minus_sign: | N/A | | +| `extract_footer` | *Optional[bool]* | :heavy_minus_sign: | N/A | | \ No newline at end of file diff --git a/docs/models/outputcontentchunks.md b/docs/models/outputcontentchunks.md index c76bc31d..e5185014 100644 --- a/docs/models/outputcontentchunks.md +++ b/docs/models/outputcontentchunks.md @@ -27,10 +27,10 @@ value: models.ToolFileChunk = /* values here */ value: models.DocumentURLChunk = /* values here */ ``` -### `models.ThinkChunk` +### `models.ConversationThinkChunk` ```python -value: models.ThinkChunk = /* values here */ +value: models.ConversationThinkChunk = /* values here */ ``` ### `models.ToolReferenceChunk` diff --git a/docs/models/realtimetranscriptioninputaudioappend.md b/docs/models/realtimetranscriptioninputaudioappend.md new file mode 100644 index 00000000..5ee365eb --- /dev/null +++ b/docs/models/realtimetranscriptioninputaudioappend.md @@ -0,0 +1,9 @@ +# RealtimeTranscriptionInputAudioAppend + + +## Fields + +| Field | Type | Required | Description | +| ----------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------- | +| `type` | *Optional[Literal["input_audio.append"]]* | :heavy_minus_sign: | N/A | +| `audio` | *str* | :heavy_check_mark: | Base64-encoded raw PCM bytes matching the current audio_format. 
Max decoded size: 262144 bytes. | \ No newline at end of file diff --git a/docs/models/realtimetranscriptioninputaudioend.md b/docs/models/realtimetranscriptioninputaudioend.md new file mode 100644 index 00000000..393d208c --- /dev/null +++ b/docs/models/realtimetranscriptioninputaudioend.md @@ -0,0 +1,8 @@ +# RealtimeTranscriptionInputAudioEnd + + +## Fields + +| Field | Type | Required | Description | +| -------------------------------------- | -------------------------------------- | -------------------------------------- | -------------------------------------- | +| `type` | *Optional[Literal["input_audio.end"]]* | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/realtimetranscriptioninputaudioflush.md b/docs/models/realtimetranscriptioninputaudioflush.md new file mode 100644 index 00000000..367725ba --- /dev/null +++ b/docs/models/realtimetranscriptioninputaudioflush.md @@ -0,0 +1,8 @@ +# RealtimeTranscriptionInputAudioFlush + + +## Fields + +| Field | Type | Required | Description | +| ---------------------------------------- | ---------------------------------------- | ---------------------------------------- | ---------------------------------------- | +| `type` | *Optional[Literal["input_audio.flush"]]* | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/realtimetranscriptionsession.md b/docs/models/realtimetranscriptionsession.md index 94a0a89e..750bd7f7 100644 --- a/docs/models/realtimetranscriptionsession.md +++ b/docs/models/realtimetranscriptionsession.md @@ -7,4 +7,5 @@ | ---------------------------------------------- | ---------------------------------------------- | ---------------------------------------------- | ---------------------------------------------- | | `request_id` | *str* | :heavy_check_mark: | N/A | | `model` | *str* | :heavy_check_mark: | N/A | -| `audio_format` | [models.AudioFormat](../models/audioformat.md) | :heavy_check_mark: | N/A | \ No newline at end of file +| 
`audio_format` | [models.AudioFormat](../models/audioformat.md) | :heavy_check_mark: | N/A | +| `target_streaming_delay_ms` | *OptionalNullable[int]* | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/realtimetranscriptionsessionupdatemessage.md b/docs/models/realtimetranscriptionsessionupdatemessage.md new file mode 100644 index 00000000..2a50ca92 --- /dev/null +++ b/docs/models/realtimetranscriptionsessionupdatemessage.md @@ -0,0 +1,9 @@ +# RealtimeTranscriptionSessionUpdateMessage + + +## Fields + +| Field | Type | Required | Description | +| ---------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------- | +| `type` | *Optional[Literal["session.update"]]* | :heavy_minus_sign: | N/A | +| `session` | [models.RealtimeTranscriptionSessionUpdatePayload](../models/realtimetranscriptionsessionupdatepayload.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/getagentrequest.md b/docs/models/realtimetranscriptionsessionupdatepayload.md similarity index 57% rename from docs/models/getagentrequest.md rename to docs/models/realtimetranscriptionsessionupdatepayload.md index 3f729dff..d6c6547d 100644 --- a/docs/models/getagentrequest.md +++ b/docs/models/realtimetranscriptionsessionupdatepayload.md @@ -1,9 +1,9 @@ -# GetAgentRequest +# RealtimeTranscriptionSessionUpdatePayload ## Fields | Field | Type | Required | Description | | ---------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------- | 
---------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------- | -| `agent_id` | *str* | :heavy_check_mark: | N/A | -| `agent_version` | [OptionalNullable[models.GetAgentAgentVersion]](../models/getagentagentversion.md) | :heavy_minus_sign: | N/A | \ No newline at end of file +| `audio_format` | [OptionalNullable[models.AudioFormat]](../models/audioformat.md) | :heavy_minus_sign: | Set before sending audio. Audio format updates are rejected after audio starts. | +| `target_streaming_delay_ms` | *OptionalNullable[int]* | :heavy_minus_sign: | Set before sending audio. Streaming delay updates are rejected after audio starts. | \ No newline at end of file diff --git a/docs/models/referencechunk.md b/docs/models/referencechunk.md index a132ca2f..d847e248 100644 --- a/docs/models/referencechunk.md +++ b/docs/models/referencechunk.md @@ -3,7 +3,7 @@ ## Fields -| Field | Type | Required | Description | -| ---------------------------------------------------------------------- | ---------------------------------------------------------------------- | ---------------------------------------------------------------------- | ---------------------------------------------------------------------- | -| `reference_ids` | List[*int*] | :heavy_check_mark: | N/A | -| `type` | [Optional[models.ReferenceChunkType]](../models/referencechunktype.md) | :heavy_minus_sign: | N/A | \ No newline at end of file +| Field | Type | Required | Description | +| -------------------------------- | -------------------------------- | -------------------------------- | -------------------------------- | +| `type` | *Optional[Literal["reference"]]* | :heavy_minus_sign: | N/A | +| `reference_ids` | List[*int*] | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/referencechunktype.md b/docs/models/referencechunktype.md deleted file mode 100644 index 1e0e2fe6..00000000 --- 
a/docs/models/referencechunktype.md +++ /dev/null @@ -1,8 +0,0 @@ -# ReferenceChunkType - - -## Values - -| Name | Value | -| ----------- | ----------- | -| `REFERENCE` | reference | \ No newline at end of file diff --git a/docs/models/reprocessdocumentrequest.md b/docs/models/reprocessdocumentrequest.md deleted file mode 100644 index cf3982a8..00000000 --- a/docs/models/reprocessdocumentrequest.md +++ /dev/null @@ -1,9 +0,0 @@ -# ReprocessDocumentRequest - - -## Fields - -| Field | Type | Required | Description | -| ------------------ | ------------------ | ------------------ | ------------------ | -| `library_id` | *str* | :heavy_check_mark: | N/A | -| `document_id` | *str* | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/response.md b/docs/models/response.md index 3512b7a8..ff679257 100644 --- a/docs/models/response.md +++ b/docs/models/response.md @@ -3,15 +3,15 @@ ## Supported Types -### `models.ClassifierJobOut` +### `models.ClassifierFineTuningJob` ```python -value: models.ClassifierJobOut = /* values here */ +value: models.ClassifierFineTuningJob = /* values here */ ``` -### `models.CompletionJobOut` +### `models.CompletionFineTuningJob` ```python -value: models.CompletionJobOut = /* values here */ +value: models.CompletionFineTuningJob = /* values here */ ``` diff --git a/docs/models/retrievemodelrequest.md b/docs/models/retrievemodelv1modelsmodelidgetrequest.md similarity index 94% rename from docs/models/retrievemodelrequest.md rename to docs/models/retrievemodelv1modelsmodelidgetrequest.md index 787c3dd1..f1280f88 100644 --- a/docs/models/retrievemodelrequest.md +++ b/docs/models/retrievemodelv1modelsmodelidgetrequest.md @@ -1,4 +1,4 @@ -# RetrieveModelRequest +# RetrieveModelV1ModelsModelIDGetRequest ## Fields diff --git a/docs/models/messageinputentryrole.md b/docs/models/role.md similarity index 84% rename from docs/models/messageinputentryrole.md rename to docs/models/role.md index f2fdc71d..853c6257 100644 --- 
a/docs/models/messageinputentryrole.md +++ b/docs/models/role.md @@ -1,4 +1,4 @@ -# MessageInputEntryRole +# Role ## Values diff --git a/docs/models/startfinetuningjobresponse.md b/docs/models/startfinetuningjobresponse.md deleted file mode 100644 index dce84c5a..00000000 --- a/docs/models/startfinetuningjobresponse.md +++ /dev/null @@ -1,19 +0,0 @@ -# StartFineTuningJobResponse - -OK - - -## Supported Types - -### `models.ClassifierDetailedJobOut` - -```python -value: models.ClassifierDetailedJobOut = /* values here */ -``` - -### `models.CompletionDetailedJobOut` - -```python -value: models.CompletionDetailedJobOut = /* values here */ -``` - diff --git a/docs/models/systemmessage.md b/docs/models/systemmessage.md index dfb0cd0b..10bda10f 100644 --- a/docs/models/systemmessage.md +++ b/docs/models/systemmessage.md @@ -5,5 +5,5 @@ | Field | Type | Required | Description | | ---------------------------------------------------------------- | ---------------------------------------------------------------- | ---------------------------------------------------------------- | ---------------------------------------------------------------- | -| `content` | [models.SystemMessageContent](../models/systemmessagecontent.md) | :heavy_check_mark: | N/A | -| `role` | *Literal["system"]* | :heavy_check_mark: | N/A | \ No newline at end of file +| `role` | *Literal["system"]* | :heavy_check_mark: | N/A | +| `content` | [models.SystemMessageContent](../models/systemmessagecontent.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/textchunk.md b/docs/models/textchunk.md index d488cb51..df0e61c3 100644 --- a/docs/models/textchunk.md +++ b/docs/models/textchunk.md @@ -3,7 +3,7 @@ ## Fields -| Field | Type | Required | Description | -| ------------------------------------------------------------ | ------------------------------------------------------------ | ------------------------------------------------------------ | 
------------------------------------------------------------ | -| `text` | *str* | :heavy_check_mark: | N/A | -| `type` | [Optional[models.TextChunkType]](../models/textchunktype.md) | :heavy_minus_sign: | N/A | \ No newline at end of file +| Field | Type | Required | Description | +| --------------------------- | --------------------------- | --------------------------- | --------------------------- | +| `type` | *Optional[Literal["text"]]* | :heavy_minus_sign: | N/A | +| `text` | *str* | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/textchunktype.md b/docs/models/textchunktype.md deleted file mode 100644 index e2a2ae8b..00000000 --- a/docs/models/textchunktype.md +++ /dev/null @@ -1,8 +0,0 @@ -# TextChunkType - - -## Values - -| Name | Value | -| ------ | ------ | -| `TEXT` | text | \ No newline at end of file diff --git a/docs/models/thinkchunk.md b/docs/models/thinkchunk.md index 66b2e0cd..70c0369f 100644 --- a/docs/models/thinkchunk.md +++ b/docs/models/thinkchunk.md @@ -5,6 +5,6 @@ | Field | Type | Required | Description | | ------------------------------------------------------------------------------- | ------------------------------------------------------------------------------- | ------------------------------------------------------------------------------- | ------------------------------------------------------------------------------- | -| `thinking` | List[[models.Thinking](../models/thinking.md)] | :heavy_check_mark: | N/A | -| `closed` | *Optional[bool]* | :heavy_minus_sign: | Whether the thinking chunk is closed or not. Currently only used for prefixing. 
| -| `type` | [Optional[models.ThinkChunkType]](../models/thinkchunktype.md) | :heavy_minus_sign: | N/A | \ No newline at end of file +| `type` | *Literal["thinking"]* | :heavy_check_mark: | N/A | +| `thinking` | List[[models.ThinkChunkThinking](../models/thinkchunkthinking.md)] | :heavy_check_mark: | N/A | +| `closed` | *Optional[bool]* | :heavy_minus_sign: | Whether the thinking chunk is closed or not. Currently only used for prefixing. | \ No newline at end of file diff --git a/docs/models/thinking.md b/docs/models/thinkchunkthinking.md similarity index 90% rename from docs/models/thinking.md rename to docs/models/thinkchunkthinking.md index c7a0d5c9..dd1ecca1 100644 --- a/docs/models/thinking.md +++ b/docs/models/thinkchunkthinking.md @@ -1,4 +1,4 @@ -# Thinking +# ThinkChunkThinking ## Supported Types diff --git a/docs/models/thinkchunktype.md b/docs/models/thinkchunktype.md deleted file mode 100644 index baf6f755..00000000 --- a/docs/models/thinkchunktype.md +++ /dev/null @@ -1,8 +0,0 @@ -# ThinkChunkType - - -## Values - -| Name | Value | -| ---------- | ---------- | -| `THINKING` | thinking | \ No newline at end of file diff --git a/docs/models/toolcallconfirmation.md b/docs/models/toolcallconfirmation.md new file mode 100644 index 00000000..1812f7d6 --- /dev/null +++ b/docs/models/toolcallconfirmation.md @@ -0,0 +1,9 @@ +# ToolCallConfirmation + + +## Fields + +| Field | Type | Required | Description | +| ------------------------------------------------ | ------------------------------------------------ | ------------------------------------------------ | ------------------------------------------------ | +| `tool_call_id` | *str* | :heavy_check_mark: | N/A | +| `confirmation` | [models.Confirmation](../models/confirmation.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/updateftmodelin.md b/docs/models/toolconfiguration.md similarity index 54% rename from docs/models/updateftmodelin.md rename to 
docs/models/toolconfiguration.md index 4e55b1a7..89286a17 100644 --- a/docs/models/updateftmodelin.md +++ b/docs/models/toolconfiguration.md @@ -1,9 +1,10 @@ -# UpdateFTModelIn +# ToolConfiguration ## Fields | Field | Type | Required | Description | | ----------------------- | ----------------------- | ----------------------- | ----------------------- | -| `name` | *OptionalNullable[str]* | :heavy_minus_sign: | N/A | -| `description` | *OptionalNullable[str]* | :heavy_minus_sign: | N/A | \ No newline at end of file +| `exclude` | List[*str*] | :heavy_minus_sign: | N/A | +| `include` | List[*str*] | :heavy_minus_sign: | N/A | +| `requires_confirmation` | List[*str*] | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/toolexecutionentry.md b/docs/models/toolexecutionentry.md index adf88fb1..03316381 100644 --- a/docs/models/toolexecutionentry.md +++ b/docs/models/toolexecutionentry.md @@ -3,13 +3,15 @@ ## Fields -| Field | Type | Required | Description | -| ---------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------- | -| `object` | [Optional[models.ToolExecutionEntryObject]](../models/toolexecutionentryobject.md) | :heavy_minus_sign: | N/A | -| `type` | [Optional[models.ToolExecutionEntryType]](../models/toolexecutionentrytype.md) | :heavy_minus_sign: | N/A | -| `created_at` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_minus_sign: | N/A | -| `completed_at` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_minus_sign: | N/A | -| `id` | *Optional[str]* | :heavy_minus_sign: | N/A | -| `name` | [models.ToolExecutionEntryName](../models/toolexecutionentryname.md) | :heavy_check_mark: | N/A | -| `arguments` 
| *str* | :heavy_check_mark: | N/A | -| `info` | Dict[str, *Any*] | :heavy_minus_sign: | N/A | \ No newline at end of file +| Field | Type | Required | Description | +| -------------------------------------------------------------------- | -------------------------------------------------------------------- | -------------------------------------------------------------------- | -------------------------------------------------------------------- | +| `object` | *Optional[Literal["entry"]]* | :heavy_minus_sign: | N/A | +| `type` | *Optional[Literal["tool.execution"]]* | :heavy_minus_sign: | N/A | +| `created_at` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_minus_sign: | N/A | +| `completed_at` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_minus_sign: | N/A | +| `agent_id` | *OptionalNullable[str]* | :heavy_minus_sign: | N/A | +| `model` | *OptionalNullable[str]* | :heavy_minus_sign: | N/A | +| `id` | *Optional[str]* | :heavy_minus_sign: | N/A | +| `name` | [models.ToolExecutionEntryName](../models/toolexecutionentryname.md) | :heavy_check_mark: | N/A | +| `arguments` | *str* | :heavy_check_mark: | N/A | +| `info` | Dict[str, *Any*] | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/toolexecutionentryobject.md b/docs/models/toolexecutionentryobject.md deleted file mode 100644 index 0ca79af5..00000000 --- a/docs/models/toolexecutionentryobject.md +++ /dev/null @@ -1,8 +0,0 @@ -# ToolExecutionEntryObject - - -## Values - -| Name | Value | -| ------- | ------- | -| `ENTRY` | entry | \ No newline at end of file diff --git a/docs/models/toolexecutionentrytype.md b/docs/models/toolexecutionentrytype.md deleted file mode 100644 index a67629b8..00000000 --- a/docs/models/toolexecutionentrytype.md +++ /dev/null @@ -1,8 +0,0 @@ -# ToolExecutionEntryType - - -## Values - -| Name | Value | -| ---------------- | ---------------- | -| `TOOL_EXECUTION` | tool.execution | \ No 
newline at end of file diff --git a/docs/models/toolexecutionstartedevent.md b/docs/models/toolexecutionstartedevent.md index c41c7258..189b8a3d 100644 --- a/docs/models/toolexecutionstartedevent.md +++ b/docs/models/toolexecutionstartedevent.md @@ -9,5 +9,7 @@ | `created_at` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_minus_sign: | N/A | | `output_index` | *Optional[int]* | :heavy_minus_sign: | N/A | | `id` | *str* | :heavy_check_mark: | N/A | +| `model` | *OptionalNullable[str]* | :heavy_minus_sign: | N/A | +| `agent_id` | *OptionalNullable[str]* | :heavy_minus_sign: | N/A | | `name` | [models.ToolExecutionStartedEventName](../models/toolexecutionstartedeventname.md) | :heavy_check_mark: | N/A | | `arguments` | *str* | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/toolfilechunk.md b/docs/models/toolfilechunk.md index a3ffaa2b..d6002175 100644 --- a/docs/models/toolfilechunk.md +++ b/docs/models/toolfilechunk.md @@ -3,10 +3,10 @@ ## Fields -| Field | Type | Required | Description | -| -------------------------------------------------------------------- | -------------------------------------------------------------------- | -------------------------------------------------------------------- | -------------------------------------------------------------------- | -| `type` | [Optional[models.ToolFileChunkType]](../models/toolfilechunktype.md) | :heavy_minus_sign: | N/A | -| `tool` | [models.ToolFileChunkTool](../models/toolfilechunktool.md) | :heavy_check_mark: | N/A | -| `file_id` | *str* | :heavy_check_mark: | N/A | -| `file_name` | *OptionalNullable[str]* | :heavy_minus_sign: | N/A | -| `file_type` | *OptionalNullable[str]* | :heavy_minus_sign: | N/A | \ No newline at end of file +| Field | Type | Required | Description | +| ---------------------------------------------------------- | ---------------------------------------------------------- | 
---------------------------------------------------------- | ---------------------------------------------------------- | +| `type` | *Optional[Literal["tool_file"]]* | :heavy_minus_sign: | N/A | +| `tool` | [models.ToolFileChunkTool](../models/toolfilechunktool.md) | :heavy_check_mark: | N/A | +| `file_id` | *str* | :heavy_check_mark: | N/A | +| `file_name` | *OptionalNullable[str]* | :heavy_minus_sign: | N/A | +| `file_type` | *OptionalNullable[str]* | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/toolfilechunktype.md b/docs/models/toolfilechunktype.md deleted file mode 100644 index 7e99acef..00000000 --- a/docs/models/toolfilechunktype.md +++ /dev/null @@ -1,8 +0,0 @@ -# ToolFileChunkType - - -## Values - -| Name | Value | -| ----------- | ----------- | -| `TOOL_FILE` | tool_file | \ No newline at end of file diff --git a/docs/models/toolmessage.md b/docs/models/toolmessage.md index fa00d666..7201481e 100644 --- a/docs/models/toolmessage.md +++ b/docs/models/toolmessage.md @@ -5,7 +5,7 @@ | Field | Type | Required | Description | | ---------------------------------------------------------------------- | ---------------------------------------------------------------------- | ---------------------------------------------------------------------- | ---------------------------------------------------------------------- | +| `role` | *Literal["tool"]* | :heavy_check_mark: | N/A | | `content` | [Nullable[models.ToolMessageContent]](../models/toolmessagecontent.md) | :heavy_check_mark: | N/A | | `tool_call_id` | *OptionalNullable[str]* | :heavy_minus_sign: | N/A | -| `name` | *OptionalNullable[str]* | :heavy_minus_sign: | N/A | -| `role` | *Literal["tool"]* | :heavy_check_mark: | N/A | \ No newline at end of file +| `name` | *OptionalNullable[str]* | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/toolreferencechunk.md b/docs/models/toolreferencechunk.md index 3020dbc9..49ea4ca7 100644 --- 
a/docs/models/toolreferencechunk.md +++ b/docs/models/toolreferencechunk.md @@ -3,11 +3,11 @@ ## Fields -| Field | Type | Required | Description | -| ------------------------------------------------------------------------------ | ------------------------------------------------------------------------------ | ------------------------------------------------------------------------------ | ------------------------------------------------------------------------------ | -| `type` | [Optional[models.ToolReferenceChunkType]](../models/toolreferencechunktype.md) | :heavy_minus_sign: | N/A | -| `tool` | [models.ToolReferenceChunkTool](../models/toolreferencechunktool.md) | :heavy_check_mark: | N/A | -| `title` | *str* | :heavy_check_mark: | N/A | -| `url` | *OptionalNullable[str]* | :heavy_minus_sign: | N/A | -| `favicon` | *OptionalNullable[str]* | :heavy_minus_sign: | N/A | -| `description` | *OptionalNullable[str]* | :heavy_minus_sign: | N/A | \ No newline at end of file +| Field | Type | Required | Description | +| -------------------------------------------------------------------- | -------------------------------------------------------------------- | -------------------------------------------------------------------- | -------------------------------------------------------------------- | +| `type` | *Optional[Literal["tool_reference"]]* | :heavy_minus_sign: | N/A | +| `tool` | [models.ToolReferenceChunkTool](../models/toolreferencechunktool.md) | :heavy_check_mark: | N/A | +| `title` | *str* | :heavy_check_mark: | N/A | +| `url` | *OptionalNullable[str]* | :heavy_minus_sign: | N/A | +| `favicon` | *OptionalNullable[str]* | :heavy_minus_sign: | N/A | +| `description` | *OptionalNullable[str]* | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/toolreferencechunktype.md b/docs/models/toolreferencechunktype.md deleted file mode 100644 index bc57d277..00000000 --- a/docs/models/toolreferencechunktype.md +++ /dev/null @@ -1,8 +0,0 @@ 
-# ToolReferenceChunkType - - -## Values - -| Name | Value | -| ---------------- | ---------------- | -| `TOOL_REFERENCE` | tool_reference | \ No newline at end of file diff --git a/docs/models/transcriptionsegmentchunk.md b/docs/models/transcriptionsegmentchunk.md index 00a599ee..d7672c0e 100644 --- a/docs/models/transcriptionsegmentchunk.md +++ b/docs/models/transcriptionsegmentchunk.md @@ -3,12 +3,12 @@ ## Fields -| Field | Type | Required | Description | -| -------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------- | -| `text` | *str* | :heavy_check_mark: | N/A | -| `start` | *float* | :heavy_check_mark: | N/A | -| `end` | *float* | :heavy_check_mark: | N/A | -| `score` | *OptionalNullable[float]* | :heavy_minus_sign: | N/A | -| `speaker_id` | *OptionalNullable[str]* | :heavy_minus_sign: | N/A | -| `type` | [Optional[models.TranscriptionSegmentChunkType]](../models/transcriptionsegmentchunktype.md) | :heavy_minus_sign: | N/A | -| `__pydantic_extra__` | Dict[str, *Any*] | :heavy_minus_sign: | N/A | \ No newline at end of file +| Field | Type | Required | Description | +| -------------------------------------------- | -------------------------------------------- | -------------------------------------------- | -------------------------------------------- | +| `type` | *Optional[Literal["transcription_segment"]]* | :heavy_minus_sign: | N/A | +| `text` | *str* | :heavy_check_mark: | N/A | +| `start` | *float* | :heavy_check_mark: | N/A | +| `end` | *float* | :heavy_check_mark: | N/A | +| `score` | *OptionalNullable[float]* | :heavy_minus_sign: | N/A | +| `speaker_id` | *OptionalNullable[str]* | :heavy_minus_sign: | N/A | +| `__pydantic_extra__` | 
Dict[str, *Any*] | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/transcriptionsegmentchunktype.md b/docs/models/transcriptionsegmentchunktype.md deleted file mode 100644 index 2968fa26..00000000 --- a/docs/models/transcriptionsegmentchunktype.md +++ /dev/null @@ -1,8 +0,0 @@ -# TranscriptionSegmentChunkType - - -## Values - -| Name | Value | -| ----------------------- | ----------------------- | -| `TRANSCRIPTION_SEGMENT` | transcription_segment | \ No newline at end of file diff --git a/docs/models/transcriptionstreamsegmentdelta.md b/docs/models/transcriptionstreamsegmentdelta.md index e0143a39..1b652a3b 100644 --- a/docs/models/transcriptionstreamsegmentdelta.md +++ b/docs/models/transcriptionstreamsegmentdelta.md @@ -5,9 +5,9 @@ | Field | Type | Required | Description | | ---------------------------------- | ---------------------------------- | ---------------------------------- | ---------------------------------- | +| `type` | *Literal["transcription.segment"]* | :heavy_check_mark: | N/A | | `text` | *str* | :heavy_check_mark: | N/A | | `start` | *float* | :heavy_check_mark: | N/A | | `end` | *float* | :heavy_check_mark: | N/A | | `speaker_id` | *OptionalNullable[str]* | :heavy_minus_sign: | N/A | -| `type` | *Literal["transcription.segment"]* | :heavy_check_mark: | N/A | | `__pydantic_extra__` | Dict[str, *Any*] | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/transcriptionstreamtextdelta.md b/docs/models/transcriptionstreamtextdelta.md index a4062171..77bd0ddc 100644 --- a/docs/models/transcriptionstreamtextdelta.md +++ b/docs/models/transcriptionstreamtextdelta.md @@ -5,6 +5,6 @@ | Field | Type | Required | Description | | ------------------------------------- | ------------------------------------- | ------------------------------------- | ------------------------------------- | -| `text` | *str* | :heavy_check_mark: | N/A | | `type` | *Literal["transcription.text.delta"]* | 
:heavy_check_mark: | N/A | +| `text` | *str* | :heavy_check_mark: | N/A | | `__pydantic_extra__` | Dict[str, *Any*] | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/archiveftmodelout.md b/docs/models/unarchivemodelresponse.md similarity index 96% rename from docs/models/archiveftmodelout.md rename to docs/models/unarchivemodelresponse.md index 98fa7b19..375962a7 100644 --- a/docs/models/archiveftmodelout.md +++ b/docs/models/unarchivemodelresponse.md @@ -1,4 +1,4 @@ -# ArchiveFTModelOut +# UnarchiveModelResponse ## Fields diff --git a/docs/models/updateagentrequest.md b/docs/models/updateagentrequest.md index 358cb71d..d3428d92 100644 --- a/docs/models/updateagentrequest.md +++ b/docs/models/updateagentrequest.md @@ -3,7 +3,15 @@ ## Fields -| Field | Type | Required | Description | -| ------------------------------------------------------------ | ------------------------------------------------------------ | ------------------------------------------------------------ | ------------------------------------------------------------ | -| `agent_id` | *str* | :heavy_check_mark: | N/A | -| `agent_update_request` | [models.AgentUpdateRequest](../models/agentupdaterequest.md) | :heavy_check_mark: | N/A | \ No newline at end of file +| Field | Type | Required | Description | +| -------------------------------------------------------------------------- | -------------------------------------------------------------------------- | -------------------------------------------------------------------------- | -------------------------------------------------------------------------- | +| `instructions` | *OptionalNullable[str]* | :heavy_minus_sign: | Instruction prompt the model will follow during the conversation. | +| `tools` | List[[models.UpdateAgentRequestTool](../models/updateagentrequesttool.md)] | :heavy_minus_sign: | List of tools which are available to the model during the conversation. 
| +| `completion_args` | [Optional[models.CompletionArgs]](../models/completionargs.md) | :heavy_minus_sign: | White-listed arguments from the completion API | +| `model` | *OptionalNullable[str]* | :heavy_minus_sign: | N/A | +| `name` | *OptionalNullable[str]* | :heavy_minus_sign: | N/A | +| `description` | *OptionalNullable[str]* | :heavy_minus_sign: | N/A | +| `handoffs` | List[*str*] | :heavy_minus_sign: | N/A | +| `deployment_chat` | *OptionalNullable[bool]* | :heavy_minus_sign: | N/A | +| `metadata` | Dict[str, *Any*] | :heavy_minus_sign: | N/A | +| `version_message` | *OptionalNullable[str]* | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/agentcreationrequesttool.md b/docs/models/updateagentrequesttool.md similarity index 95% rename from docs/models/agentcreationrequesttool.md rename to docs/models/updateagentrequesttool.md index b3bd7fa3..e358b1ed 100644 --- a/docs/models/agentcreationrequesttool.md +++ b/docs/models/updateagentrequesttool.md @@ -1,4 +1,4 @@ -# AgentCreationRequestTool +# UpdateAgentRequestTool ## Supported Types diff --git a/docs/models/updatedocumentrequest.md b/docs/models/updatedocumentrequest.md index fa5d117a..7e0b41b7 100644 --- a/docs/models/updatedocumentrequest.md +++ b/docs/models/updatedocumentrequest.md @@ -3,8 +3,7 @@ ## Fields -| Field | Type | Required | Description | -| -------------------------------------------------------- | -------------------------------------------------------- | -------------------------------------------------------- | -------------------------------------------------------- | -| `library_id` | *str* | :heavy_check_mark: | N/A | -| `document_id` | *str* | :heavy_check_mark: | N/A | -| `document_update_in` | [models.DocumentUpdateIn](../models/documentupdatein.md) | :heavy_check_mark: | N/A | \ No newline at end of file +| Field | Type | Required | Description | +| ------------------------------------------------------- | 
------------------------------------------------------- | ------------------------------------------------------- | ------------------------------------------------------- | +| `name` | *OptionalNullable[str]* | :heavy_minus_sign: | N/A | +| `attributes` | Dict[str, [models.Attributes](../models/attributes.md)] | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/updatelibraryrequest.md b/docs/models/updatelibraryrequest.md index e03883cc..aaffc5a9 100644 --- a/docs/models/updatelibraryrequest.md +++ b/docs/models/updatelibraryrequest.md @@ -3,7 +3,7 @@ ## Fields -| Field | Type | Required | Description | -| ------------------------------------------------------ | ------------------------------------------------------ | ------------------------------------------------------ | ------------------------------------------------------ | -| `library_id` | *str* | :heavy_check_mark: | N/A | -| `library_in_update` | [models.LibraryInUpdate](../models/libraryinupdate.md) | :heavy_check_mark: | N/A | \ No newline at end of file +| Field | Type | Required | Description | +| ----------------------- | ----------------------- | ----------------------- | ----------------------- | +| `name` | *OptionalNullable[str]* | :heavy_minus_sign: | N/A | +| `description` | *OptionalNullable[str]* | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/updatemodelrequest.md b/docs/models/updatemodelrequest.md index 5799c63b..56b84c59 100644 --- a/docs/models/updatemodelrequest.md +++ b/docs/models/updatemodelrequest.md @@ -3,7 +3,7 @@ ## Fields -| Field | Type | Required | Description | Example | -| ------------------------------------------------------ | ------------------------------------------------------ | ------------------------------------------------------ | ------------------------------------------------------ | ------------------------------------------------------ | -| `model_id` | *str* | :heavy_check_mark: | The ID of the 
model to update. | ft:open-mistral-7b:587a6b29:20240514:7e773925 | -| `update_ft_model_in` | [models.UpdateFTModelIn](../models/updateftmodelin.md) | :heavy_check_mark: | N/A | | \ No newline at end of file +| Field | Type | Required | Description | +| ----------------------- | ----------------------- | ----------------------- | ----------------------- | +| `name` | *OptionalNullable[str]* | :heavy_minus_sign: | N/A | +| `description` | *OptionalNullable[str]* | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/updatemodelresponse.md b/docs/models/updatemodelresponse.md deleted file mode 100644 index 275ee77f..00000000 --- a/docs/models/updatemodelresponse.md +++ /dev/null @@ -1,19 +0,0 @@ -# UpdateModelResponse - -OK - - -## Supported Types - -### `models.ClassifierFTModelOut` - -```python -value: models.ClassifierFTModelOut = /* values here */ -``` - -### `models.CompletionFTModelOut` - -```python -value: models.CompletionFTModelOut = /* values here */ -``` - diff --git a/docs/models/usermessage.md b/docs/models/usermessage.md index 78ed066e..e7a932ed 100644 --- a/docs/models/usermessage.md +++ b/docs/models/usermessage.md @@ -5,5 +5,5 @@ | Field | Type | Required | Description | | ---------------------------------------------------------------------- | ---------------------------------------------------------------------- | ---------------------------------------------------------------------- | ---------------------------------------------------------------------- | -| `content` | [Nullable[models.UserMessageContent]](../models/usermessagecontent.md) | :heavy_check_mark: | N/A | -| `role` | *Literal["user"]* | :heavy_check_mark: | N/A | \ No newline at end of file +| `role` | *Literal["user"]* | :heavy_check_mark: | N/A | +| `content` | [Nullable[models.UserMessageContent]](../models/usermessagecontent.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/wandbintegrationout.md 
b/docs/models/wandbintegrationresult.md similarity index 98% rename from docs/models/wandbintegrationout.md rename to docs/models/wandbintegrationresult.md index a6f65667..d12bc311 100644 --- a/docs/models/wandbintegrationout.md +++ b/docs/models/wandbintegrationresult.md @@ -1,4 +1,4 @@ -# WandbIntegrationOut +# WandbIntegrationResult ## Fields diff --git a/docs/models/websearchpremiumtool.md b/docs/models/websearchpremiumtool.md index 07b8b926..78b736cd 100644 --- a/docs/models/websearchpremiumtool.md +++ b/docs/models/websearchpremiumtool.md @@ -3,6 +3,7 @@ ## Fields -| Field | Type | Required | Description | -| ------------------------------- | ------------------------------- | ------------------------------- | ------------------------------- | -| `type` | *Literal["web_search_premium"]* | :heavy_check_mark: | N/A | \ No newline at end of file +| Field | Type | Required | Description | +| ---------------------------------------------------------------------------- | ---------------------------------------------------------------------------- | ---------------------------------------------------------------------------- | ---------------------------------------------------------------------------- | +| `tool_configuration` | [OptionalNullable[models.ToolConfiguration]](../models/toolconfiguration.md) | :heavy_minus_sign: | N/A | +| `type` | *Literal["web_search_premium"]* | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/websearchtool.md b/docs/models/websearchtool.md index da5e7b7b..4ca7333c 100644 --- a/docs/models/websearchtool.md +++ b/docs/models/websearchtool.md @@ -3,6 +3,7 @@ ## Fields -| Field | Type | Required | Description | -| ----------------------- | ----------------------- | ----------------------- | ----------------------- | -| `type` | *Literal["web_search"]* | :heavy_check_mark: | N/A | \ No newline at end of file +| Field | Type | Required | Description | +| 
---------------------------------------------------------------------------- | ---------------------------------------------------------------------------- | ---------------------------------------------------------------------------- | ---------------------------------------------------------------------------- | +| `tool_configuration` | [OptionalNullable[models.ToolConfiguration]](../models/toolconfiguration.md) | :heavy_minus_sign: | N/A | +| `type` | *Literal["web_search"]* | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/sdks/accesses/README.md b/docs/sdks/accesses/README.md index c1e3866d..c50456df 100644 --- a/docs/sdks/accesses/README.md +++ b/docs/sdks/accesses/README.md @@ -16,7 +16,7 @@ Given a library, list all of the Entity that have access and to what level. ### Example Usage - + ```python from mistralai.client import Mistral import os @@ -26,7 +26,7 @@ with Mistral( api_key=os.getenv("MISTRAL_API_KEY", ""), ) as mistral: - res = mistral.beta.libraries.accesses.list(library_id="9eb628ef-f118-47eb-b3cc-9750c4ca5fb6") + res = mistral.beta.libraries.accesses.list(library_id="d2169833-d8e2-416e-a372-76518d3d99c2") # Handle response print(res) @@ -48,8 +48,8 @@ with Mistral( | Error Type | Status Code | Content Type | | -------------------------- | -------------------------- | -------------------------- | -| models.HTTPValidationError | 422 | application/json | -| models.SDKError | 4XX, 5XX | \*/\* | +| errors.HTTPValidationError | 422 | application/json | +| errors.SDKError | 4XX, 5XX | \*/\* | ## update_or_create @@ -57,7 +57,7 @@ Given a library id, you can create or update the access level of an entity. 
You ### Example Usage - + ```python from mistralai.client import Mistral import os @@ -67,7 +67,7 @@ with Mistral( api_key=os.getenv("MISTRAL_API_KEY", ""), ) as mistral: - res = mistral.beta.libraries.accesses.update_or_create(library_id="88bb030c-1cb5-4231-ba13-742c56554876", level="Viewer", share_with_uuid="6a736283-c1fa-49b0-9b6d-ea9309c0a766", share_with_type="Workspace") + res = mistral.beta.libraries.accesses.update_or_create(library_id="36de3a24-5b1c-4c8f-9d84-d5642205a976", level="Viewer", share_with_uuid="0ae92ecb-21ed-47c5-9f7e-0b2cbe325a20", share_with_type="User") # Handle response print(res) @@ -93,8 +93,8 @@ with Mistral( | Error Type | Status Code | Content Type | | -------------------------- | -------------------------- | -------------------------- | -| models.HTTPValidationError | 422 | application/json | -| models.SDKError | 4XX, 5XX | \*/\* | +| errors.HTTPValidationError | 422 | application/json | +| errors.SDKError | 4XX, 5XX | \*/\* | ## delete @@ -102,7 +102,7 @@ Given a library id, you can delete the access level of an entity. 
An owner canno ### Example Usage - + ```python from mistralai.client import Mistral import os @@ -112,7 +112,7 @@ with Mistral( api_key=os.getenv("MISTRAL_API_KEY", ""), ) as mistral: - res = mistral.beta.libraries.accesses.delete(library_id="fc7ab1cf-e33c-4791-a6e0-95ff1f921c43", share_with_uuid="5818ddff-3568-40f1-a9e4-39d6cb9f5c94", share_with_type="Org") + res = mistral.beta.libraries.accesses.delete(library_id="709e3cad-9fb2-4f4e-bf88-143cf1808107", share_with_uuid="b843cc47-ce8f-4354-8cfc-5fcd7fb2865b", share_with_type="User") # Handle response print(res) @@ -137,5 +137,5 @@ with Mistral( | Error Type | Status Code | Content Type | | -------------------------- | -------------------------- | -------------------------- | -| models.HTTPValidationError | 422 | application/json | -| models.SDKError | 4XX, 5XX | \*/\* | \ No newline at end of file +| errors.HTTPValidationError | 422 | application/json | +| errors.SDKError | 4XX, 5XX | \*/\* | \ No newline at end of file diff --git a/docs/sdks/agents/README.md b/docs/sdks/agents/README.md index cd3ec4c6..8a608370 100644 --- a/docs/sdks/agents/README.md +++ b/docs/sdks/agents/README.md @@ -15,7 +15,7 @@ Agents Completion ### Example Usage - + ```python from mistralai.client import Mistral import os @@ -27,8 +27,8 @@ with Mistral( res = mistral.agents.complete(messages=[ { - "content": "Who is the best French painter? Answer in one short sentence.", "role": "user", + "content": "Who is the best French painter? Answer in one short sentence.", }, ], agent_id="", stream=False, response_format={ "type": "text", @@ -50,7 +50,7 @@ with Mistral( | `stop` | [Optional[models.AgentsCompletionRequestStop]](../../models/agentscompletionrequeststop.md) | :heavy_minus_sign: | Stop generation if this token is detected. Or if one of these tokens is detected when providing an array | | | `random_seed` | *OptionalNullable[int]* | :heavy_minus_sign: | The seed to use for random sampling. 
If set, different calls will generate deterministic results. | | | `metadata` | Dict[str, *Any*] | :heavy_minus_sign: | N/A | | -| `response_format` | [Optional[models.ResponseFormat]](../../models/responseformat.md) | :heavy_minus_sign: | Specify the format that the model must output. By default it will use `{ "type": "text" }`. Setting to `{ "type": "json_object" }` enables JSON mode, which guarantees the message the model generates is in JSON. When using JSON mode you MUST also instruct the model to produce JSON yourself with a system or a user message. Setting to `{ "type": "json_schema" }` enables JSON schema mode, which guarantees the message the model generates is in JSON and follows the schema you provide. | {
"type": "text"
} | +| `response_format` | [Optional[models.ResponseFormat]](../../models/responseformat.md) | :heavy_minus_sign: | Specify the format that the model must output. By default it will use `{ "type": "text" }`. Setting to `{ "type": "json_object" }` enables JSON mode, which guarantees the message the model generates is in JSON. When using JSON mode you MUST also instruct the model to produce JSON yourself with a system or a user message. Setting to `{ "type": "json_schema" }` enables JSON schema mode, which guarantees the message the model generates is in JSON and follows the schema you provide. | **Example 1:** {
"type": "text"
}
**Example 2:** {
"type": "json_object"
}
**Example 3:** {
"type": "json_schema",
"json_schema": {
"schema": {
"properties": {
"name": {
"title": "Name",
"type": "string"
},
"authors": {
"items": {
"type": "string"
},
"title": "Authors",
"type": "array"
}
},
"required": [
"name",
"authors"
],
"title": "Book",
"type": "object",
"additionalProperties": false
},
"name": "book",
"strict": true
}
} | | `tools` | List[[models.Tool](../../models/tool.md)] | :heavy_minus_sign: | N/A | | | `tool_choice` | [Optional[models.AgentsCompletionRequestToolChoice]](../../models/agentscompletionrequesttoolchoice.md) | :heavy_minus_sign: | N/A | | | `presence_penalty` | *Optional[float]* | :heavy_minus_sign: | The `presence_penalty` determines how much the model penalizes the repetition of words or phrases. A higher presence penalty encourages the model to use a wider variety of words and phrases, making the output more diverse and creative. | | @@ -69,8 +69,8 @@ with Mistral( | Error Type | Status Code | Content Type | | -------------------------- | -------------------------- | -------------------------- | -| models.HTTPValidationError | 422 | application/json | -| models.SDKError | 4XX, 5XX | \*/\* | +| errors.HTTPValidationError | 422 | application/json | +| errors.SDKError | 4XX, 5XX | \*/\* | ## stream @@ -90,8 +90,8 @@ with Mistral( res = mistral.agents.stream(messages=[ { - "content": "Who is the best French painter? Answer in one short sentence.", "role": "user", + "content": "Who is the best French painter? Answer in one short sentence.", }, ], agent_id="", stream=True, response_format={ "type": "text", @@ -115,7 +115,7 @@ with Mistral( | `stop` | [Optional[models.AgentsCompletionStreamRequestStop]](../../models/agentscompletionstreamrequeststop.md) | :heavy_minus_sign: | Stop generation if this token is detected. Or if one of these tokens is detected when providing an array | | | `random_seed` | *OptionalNullable[int]* | :heavy_minus_sign: | The seed to use for random sampling. If set, different calls will generate deterministic results. | | | `metadata` | Dict[str, *Any*] | :heavy_minus_sign: | N/A | | -| `response_format` | [Optional[models.ResponseFormat]](../../models/responseformat.md) | :heavy_minus_sign: | Specify the format that the model must output. By default it will use `{ "type": "text" }`. 
Setting to `{ "type": "json_object" }` enables JSON mode, which guarantees the message the model generates is in JSON. When using JSON mode you MUST also instruct the model to produce JSON yourself with a system or a user message. Setting to `{ "type": "json_schema" }` enables JSON schema mode, which guarantees the message the model generates is in JSON and follows the schema you provide. | {
"type": "text"
} | +| `response_format` | [Optional[models.ResponseFormat]](../../models/responseformat.md) | :heavy_minus_sign: | Specify the format that the model must output. By default it will use `{ "type": "text" }`. Setting to `{ "type": "json_object" }` enables JSON mode, which guarantees the message the model generates is in JSON. When using JSON mode you MUST also instruct the model to produce JSON yourself with a system or a user message. Setting to `{ "type": "json_schema" }` enables JSON schema mode, which guarantees the message the model generates is in JSON and follows the schema you provide. | **Example 1:** {
"type": "text"
}
**Example 2:** {
"type": "json_object"
}
**Example 3:** {
"type": "json_schema",
"json_schema": {
"schema": {
"properties": {
"name": {
"title": "Name",
"type": "string"
},
"authors": {
"items": {
"type": "string"
},
"title": "Authors",
"type": "array"
}
},
"required": [
"name",
"authors"
],
"title": "Book",
"type": "object",
"additionalProperties": false
},
"name": "book",
"strict": true
}
} | | `tools` | List[[models.Tool](../../models/tool.md)] | :heavy_minus_sign: | N/A | | | `tool_choice` | [Optional[models.AgentsCompletionStreamRequestToolChoice]](../../models/agentscompletionstreamrequesttoolchoice.md) | :heavy_minus_sign: | N/A | | | `presence_penalty` | *Optional[float]* | :heavy_minus_sign: | The `presence_penalty` determines how much the model penalizes the repetition of words or phrases. A higher presence penalty encourages the model to use a wider variety of words and phrases, making the output more diverse and creative. | | @@ -134,5 +134,5 @@ with Mistral( | Error Type | Status Code | Content Type | | -------------------------- | -------------------------- | -------------------------- | -| models.HTTPValidationError | 422 | application/json | -| models.SDKError | 4XX, 5XX | \*/\* | \ No newline at end of file +| errors.HTTPValidationError | 422 | application/json | +| errors.SDKError | 4XX, 5XX | \*/\* | \ No newline at end of file diff --git a/docs/sdks/batchjobs/README.md b/docs/sdks/batchjobs/README.md index 24316d78..3633fe4e 100644 --- a/docs/sdks/batchjobs/README.md +++ b/docs/sdks/batchjobs/README.md @@ -15,7 +15,7 @@ Get a list of batch jobs for your organization and user. ### Example Usage - + ```python from mistralai.client import Mistral import os @@ -49,13 +49,13 @@ with Mistral( ### Response -**[models.BatchJobsOut](../../models/batchjobsout.md)** +**[models.ListBatchJobsResponse](../../models/listbatchjobsresponse.md)** ### Errors | Error Type | Status Code | Content Type | | --------------- | --------------- | --------------- | -| models.SDKError | 4XX, 5XX | \*/\* | +| errors.SDKError | 4XX, 5XX | \*/\* | ## create @@ -63,7 +63,7 @@ Create a new batch job, it will be queued for processing. 
### Example Usage - + ```python from mistralai.client import Mistral import os @@ -73,7 +73,7 @@ with Mistral( api_key=os.getenv("MISTRAL_API_KEY", ""), ) as mistral: - res = mistral.batch.jobs.create(endpoint="/v1/classifications", model="mistral-small-latest", timeout_hours=24) + res = mistral.batch.jobs.create(endpoint="/v1/moderations", model="mistral-small-latest", timeout_hours=24) # Handle response print(res) @@ -87,7 +87,7 @@ with Mistral( | `endpoint` | [models.APIEndpoint](../../models/apiendpoint.md) | :heavy_check_mark: | N/A | | | `input_files` | List[*str*] | :heavy_minus_sign: | The list of input files to be used for batch inference, these files should be `jsonl` files, containing the input data corresponding to the bory request for the batch inference in a "body" field. An example of such file is the following: ```json {"custom_id": "0", "body": {"max_tokens": 100, "messages": [{"role": "user", "content": "What is the best French cheese?"}]}} {"custom_id": "1", "body": {"max_tokens": 100, "messages": [{"role": "user", "content": "What is the best French wine?"}]}} ``` | | | `requests` | List[[models.BatchRequest](../../models/batchrequest.md)] | :heavy_minus_sign: | N/A | | -| `model` | *OptionalNullable[str]* | :heavy_minus_sign: | The model to be used for batch inference. | mistral-small-latest | +| `model` | *OptionalNullable[str]* | :heavy_minus_sign: | The model to be used for batch inference. | **Example 1:** mistral-small-latest
**Example 2:** mistral-medium-latest | | `agent_id` | *OptionalNullable[str]* | :heavy_minus_sign: | In case you want to use a specific agent from the **deprecated** agents api for batch inference, you can specify the agent ID here. | | | `metadata` | Dict[str, *str*] | :heavy_minus_sign: | The metadata of your choice to be associated with the batch inference job. | | | `timeout_hours` | *Optional[int]* | :heavy_minus_sign: | The timeout in hours for the batch inference job. | | @@ -95,13 +95,13 @@ with Mistral( ### Response -**[models.BatchJobOut](../../models/batchjobout.md)** +**[models.BatchJob](../../models/batchjob.md)** ### Errors | Error Type | Status Code | Content Type | | --------------- | --------------- | --------------- | -| models.SDKError | 4XX, 5XX | \*/\* | +| errors.SDKError | 4XX, 5XX | \*/\* | ## get @@ -112,7 +112,7 @@ Args: ### Example Usage - + ```python from mistralai.client import Mistral import os @@ -122,7 +122,7 @@ with Mistral( api_key=os.getenv("MISTRAL_API_KEY", ""), ) as mistral: - res = mistral.batch.jobs.get(job_id="358c80a1-79bd-43f0-8f0e-8186713aa3ba") + res = mistral.batch.jobs.get(job_id="4017dc9f-b629-42f4-9700-8c681b9e7f0f") # Handle response print(res) @@ -139,13 +139,13 @@ with Mistral( ### Response -**[models.BatchJobOut](../../models/batchjobout.md)** +**[models.BatchJob](../../models/batchjob.md)** ### Errors | Error Type | Status Code | Content Type | | --------------- | --------------- | --------------- | -| models.SDKError | 4XX, 5XX | \*/\* | +| errors.SDKError | 4XX, 5XX | \*/\* | ## cancel @@ -153,7 +153,7 @@ Request the cancellation of a batch job. 
### Example Usage - + ```python from mistralai.client import Mistral import os @@ -163,7 +163,7 @@ with Mistral( api_key=os.getenv("MISTRAL_API_KEY", ""), ) as mistral: - res = mistral.batch.jobs.cancel(job_id="393537d7-8b33-4931-a289-7f61f8757eda") + res = mistral.batch.jobs.cancel(job_id="4fb29d1c-535b-4f0a-a1cb-2167f86da569") # Handle response print(res) @@ -179,10 +179,10 @@ with Mistral( ### Response -**[models.BatchJobOut](../../models/batchjobout.md)** +**[models.BatchJob](../../models/batchjob.md)** ### Errors | Error Type | Status Code | Content Type | | --------------- | --------------- | --------------- | -| models.SDKError | 4XX, 5XX | \*/\* | \ No newline at end of file +| errors.SDKError | 4XX, 5XX | \*/\* | \ No newline at end of file diff --git a/docs/sdks/betaagents/README.md b/docs/sdks/betaagents/README.md index 0ef655a3..aaa5110e 100644 --- a/docs/sdks/betaagents/README.md +++ b/docs/sdks/betaagents/README.md @@ -24,7 +24,7 @@ Create a new agent giving it instructions, tools, description. 
The agent is then ### Example Usage - + ```python from mistralai.client import Mistral import os @@ -34,7 +34,7 @@ with Mistral( api_key=os.getenv("MISTRAL_API_KEY", ""), ) as mistral: - res = mistral.beta.agents.create(model="Mustang", name="", completion_args={ + res = mistral.beta.agents.create(model="LeBaron", name="", completion_args={ "response_format": { "type": "text", }, @@ -47,18 +47,18 @@ with Mistral( ### Parameters -| Parameter | Type | Required | Description | -| --------------------------------------------------------------------------------- | --------------------------------------------------------------------------------- | --------------------------------------------------------------------------------- | --------------------------------------------------------------------------------- | -| `model` | *str* | :heavy_check_mark: | N/A | -| `name` | *str* | :heavy_check_mark: | N/A | -| `instructions` | *OptionalNullable[str]* | :heavy_minus_sign: | Instruction prompt the model will follow during the conversation. | -| `tools` | List[[models.AgentCreationRequestTool](../../models/agentcreationrequesttool.md)] | :heavy_minus_sign: | List of tools which are available to the model during the conversation. | -| `completion_args` | [Optional[models.CompletionArgs]](../../models/completionargs.md) | :heavy_minus_sign: | White-listed arguments from the completion API | -| `description` | *OptionalNullable[str]* | :heavy_minus_sign: | N/A | -| `handoffs` | List[*str*] | :heavy_minus_sign: | N/A | -| `metadata` | Dict[str, *Any*] | :heavy_minus_sign: | N/A | -| `version_message` | *OptionalNullable[str]* | :heavy_minus_sign: | N/A | -| `retries` | [Optional[utils.RetryConfig]](../../models/utils/retryconfig.md) | :heavy_minus_sign: | Configuration to override the default retry behavior of the client. 
| +| Parameter | Type | Required | Description | +| ----------------------------------------------------------------------------- | ----------------------------------------------------------------------------- | ----------------------------------------------------------------------------- | ----------------------------------------------------------------------------- | +| `model` | *str* | :heavy_check_mark: | N/A | +| `name` | *str* | :heavy_check_mark: | N/A | +| `instructions` | *OptionalNullable[str]* | :heavy_minus_sign: | Instruction prompt the model will follow during the conversation. | +| `tools` | List[[models.CreateAgentRequestTool](../../models/createagentrequesttool.md)] | :heavy_minus_sign: | List of tools which are available to the model during the conversation. | +| `completion_args` | [Optional[models.CompletionArgs]](../../models/completionargs.md) | :heavy_minus_sign: | White-listed arguments from the completion API | +| `description` | *OptionalNullable[str]* | :heavy_minus_sign: | N/A | +| `handoffs` | List[*str*] | :heavy_minus_sign: | N/A | +| `metadata` | Dict[str, *Any*] | :heavy_minus_sign: | N/A | +| `version_message` | *OptionalNullable[str]* | :heavy_minus_sign: | N/A | +| `retries` | [Optional[utils.RetryConfig]](../../models/utils/retryconfig.md) | :heavy_minus_sign: | Configuration to override the default retry behavior of the client. | ### Response @@ -68,8 +68,8 @@ with Mistral( | Error Type | Status Code | Content Type | | -------------------------- | -------------------------- | -------------------------- | -| models.HTTPValidationError | 422 | application/json | -| models.SDKError | 4XX, 5XX | \*/\* | +| errors.HTTPValidationError | 422 | application/json | +| errors.SDKError | 4XX, 5XX | \*/\* | ## list @@ -77,7 +77,7 @@ Retrieve a list of agent entities sorted by creation time. 
### Example Usage - + ```python from mistralai.client import Mistral import os @@ -116,8 +116,8 @@ with Mistral( | Error Type | Status Code | Content Type | | -------------------------- | -------------------------- | -------------------------- | -| models.HTTPValidationError | 422 | application/json | -| models.SDKError | 4XX, 5XX | \*/\* | +| errors.HTTPValidationError | 422 | application/json | +| errors.SDKError | 4XX, 5XX | \*/\* | ## get @@ -125,7 +125,7 @@ Given an agent, retrieve an agent entity with its attributes. The agent_version ### Example Usage - + ```python from mistralai.client import Mistral import os @@ -144,11 +144,11 @@ with Mistral( ### Parameters -| Parameter | Type | Required | Description | -| ------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------- | -| `agent_id` | *str* | :heavy_check_mark: | N/A | -| `agent_version` | [OptionalNullable[models.GetAgentAgentVersion]](../../models/getagentagentversion.md) | :heavy_minus_sign: | N/A | -| `retries` | [Optional[utils.RetryConfig]](../../models/utils/retryconfig.md) | :heavy_minus_sign: | Configuration to override the default retry behavior of the client. 
| +| Parameter | Type | Required | Description | +| ------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------- | +| `agent_id` | *str* | :heavy_check_mark: | N/A | +| `agent_version` | [OptionalNullable[models.AgentsAPIV1AgentsGetAgentVersion]](../../models/agentsapiv1agentsgetagentversion.md) | :heavy_minus_sign: | N/A | +| `retries` | [Optional[utils.RetryConfig]](../../models/utils/retryconfig.md) | :heavy_minus_sign: | Configuration to override the default retry behavior of the client. | ### Response @@ -158,8 +158,8 @@ with Mistral( | Error Type | Status Code | Content Type | | -------------------------- | -------------------------- | -------------------------- | -| models.HTTPValidationError | 422 | application/json | -| models.SDKError | 4XX, 5XX | \*/\* | +| errors.HTTPValidationError | 422 | application/json | +| errors.SDKError | 4XX, 5XX | \*/\* | ## update @@ -167,7 +167,7 @@ Update an agent attributes and create a new version. ### Example Usage - + ```python from mistralai.client import Mistral import os @@ -194,7 +194,7 @@ with Mistral( | ----------------------------------------------------------------------------- | ----------------------------------------------------------------------------- | ----------------------------------------------------------------------------- | ----------------------------------------------------------------------------- | | `agent_id` | *str* | :heavy_check_mark: | N/A | | `instructions` | *OptionalNullable[str]* | :heavy_minus_sign: | Instruction prompt the model will follow during the conversation. 
| -| `tools` | List[[models.AgentUpdateRequestTool](../../models/agentupdaterequesttool.md)] | :heavy_minus_sign: | List of tools which are available to the model during the conversation. | +| `tools` | List[[models.UpdateAgentRequestTool](../../models/updateagentrequesttool.md)] | :heavy_minus_sign: | List of tools which are available to the model during the conversation. | | `completion_args` | [Optional[models.CompletionArgs]](../../models/completionargs.md) | :heavy_minus_sign: | White-listed arguments from the completion API | | `model` | *OptionalNullable[str]* | :heavy_minus_sign: | N/A | | `name` | *OptionalNullable[str]* | :heavy_minus_sign: | N/A | @@ -213,8 +213,8 @@ with Mistral( | Error Type | Status Code | Content Type | | -------------------------- | -------------------------- | -------------------------- | -| models.HTTPValidationError | 422 | application/json | -| models.SDKError | 4XX, 5XX | \*/\* | +| errors.HTTPValidationError | 422 | application/json | +| errors.SDKError | 4XX, 5XX | \*/\* | ## delete @@ -222,7 +222,7 @@ Delete an agent entity. ### Example Usage - + ```python from mistralai.client import Mistral import os @@ -249,8 +249,8 @@ with Mistral( | Error Type | Status Code | Content Type | | -------------------------- | -------------------------- | -------------------------- | -| models.HTTPValidationError | 422 | application/json | -| models.SDKError | 4XX, 5XX | \*/\* | +| errors.HTTPValidationError | 422 | application/json | +| errors.SDKError | 4XX, 5XX | \*/\* | ## update_version @@ -258,7 +258,7 @@ Switch the version of an agent. 
### Example Usage - + ```python from mistralai.client import Mistral import os @@ -268,7 +268,7 @@ with Mistral( api_key=os.getenv("MISTRAL_API_KEY", ""), ) as mistral: - res = mistral.beta.agents.update_version(agent_id="", version=958693) + res = mistral.beta.agents.update_version(agent_id="", version=157995) # Handle response print(res) @@ -291,8 +291,8 @@ with Mistral( | Error Type | Status Code | Content Type | | -------------------------- | -------------------------- | -------------------------- | -| models.HTTPValidationError | 422 | application/json | -| models.SDKError | 4XX, 5XX | \*/\* | +| errors.HTTPValidationError | 422 | application/json | +| errors.SDKError | 4XX, 5XX | \*/\* | ## list_versions @@ -300,7 +300,7 @@ Retrieve all versions for a specific agent with full agent context. Supports pag ### Example Usage - + ```python from mistralai.client import Mistral import os @@ -334,8 +334,8 @@ with Mistral( | Error Type | Status Code | Content Type | | -------------------------- | -------------------------- | -------------------------- | -| models.HTTPValidationError | 422 | application/json | -| models.SDKError | 4XX, 5XX | \*/\* | +| errors.HTTPValidationError | 422 | application/json | +| errors.SDKError | 4XX, 5XX | \*/\* | ## get_version @@ -343,7 +343,7 @@ Get a specific agent version by version number. 
### Example Usage - + ```python from mistralai.client import Mistral import os @@ -353,7 +353,7 @@ with Mistral( api_key=os.getenv("MISTRAL_API_KEY", ""), ) as mistral: - res = mistral.beta.agents.get_version(agent_id="", version="") + res = mistral.beta.agents.get_version(agent_id="", version="788393") # Handle response print(res) @@ -376,8 +376,8 @@ with Mistral( | Error Type | Status Code | Content Type | | -------------------------- | -------------------------- | -------------------------- | -| models.HTTPValidationError | 422 | application/json | -| models.SDKError | 4XX, 5XX | \*/\* | +| errors.HTTPValidationError | 422 | application/json | +| errors.SDKError | 4XX, 5XX | \*/\* | ## create_version_alias @@ -385,7 +385,7 @@ Create a new alias or update an existing alias to point to a specific version. A ### Example Usage - + ```python from mistralai.client import Mistral import os @@ -395,7 +395,7 @@ with Mistral( api_key=os.getenv("MISTRAL_API_KEY", ""), ) as mistral: - res = mistral.beta.agents.create_version_alias(agent_id="", alias="", version=154719) + res = mistral.beta.agents.create_version_alias(agent_id="", alias="", version=595141) # Handle response print(res) @@ -419,8 +419,8 @@ with Mistral( | Error Type | Status Code | Content Type | | -------------------------- | -------------------------- | -------------------------- | -| models.HTTPValidationError | 422 | application/json | -| models.SDKError | 4XX, 5XX | \*/\* | +| errors.HTTPValidationError | 422 | application/json | +| errors.SDKError | 4XX, 5XX | \*/\* | ## list_version_aliases @@ -428,7 +428,7 @@ Retrieve all version aliases for a specific agent. 
### Example Usage - + ```python from mistralai.client import Mistral import os @@ -460,8 +460,8 @@ with Mistral( | Error Type | Status Code | Content Type | | -------------------------- | -------------------------- | -------------------------- | -| models.HTTPValidationError | 422 | application/json | -| models.SDKError | 4XX, 5XX | \*/\* | +| errors.HTTPValidationError | 422 | application/json | +| errors.SDKError | 4XX, 5XX | \*/\* | ## delete_version_alias @@ -469,7 +469,7 @@ Delete an existing alias for an agent. ### Example Usage - + ```python from mistralai.client import Mistral import os @@ -497,5 +497,5 @@ with Mistral( | Error Type | Status Code | Content Type | | -------------------------- | -------------------------- | -------------------------- | -| models.HTTPValidationError | 422 | application/json | -| models.SDKError | 4XX, 5XX | \*/\* | \ No newline at end of file +| errors.HTTPValidationError | 422 | application/json | +| errors.SDKError | 4XX, 5XX | \*/\* | \ No newline at end of file diff --git a/docs/sdks/chat/README.md b/docs/sdks/chat/README.md index 6907c29d..1bf4aead 100644 --- a/docs/sdks/chat/README.md +++ b/docs/sdks/chat/README.md @@ -27,8 +27,8 @@ with Mistral( res = mistral.chat.complete(model="mistral-large-latest", messages=[ { - "content": "Who is the best French painter? Answer in one short sentence.", "role": "user", + "content": "Who is the best French painter? Answer in one short sentence.", }, ], stream=False, response_format={ "type": "text", @@ -52,7 +52,7 @@ with Mistral( | `stop` | [Optional[models.ChatCompletionRequestStop]](../../models/chatcompletionrequeststop.md) | :heavy_minus_sign: | Stop generation if this token is detected. Or if one of these tokens is detected when providing an array | | | `random_seed` | *OptionalNullable[int]* | :heavy_minus_sign: | The seed to use for random sampling. If set, different calls will generate deterministic results. 
| | | `metadata` | Dict[str, *Any*] | :heavy_minus_sign: | N/A | | -| `response_format` | [Optional[models.ResponseFormat]](../../models/responseformat.md) | :heavy_minus_sign: | Specify the format that the model must output. By default it will use `{ "type": "text" }`. Setting to `{ "type": "json_object" }` enables JSON mode, which guarantees the message the model generates is in JSON. When using JSON mode you MUST also instruct the model to produce JSON yourself with a system or a user message. Setting to `{ "type": "json_schema" }` enables JSON schema mode, which guarantees the message the model generates is in JSON and follows the schema you provide. | {
"type": "text"
} | +| `response_format` | [Optional[models.ResponseFormat]](../../models/responseformat.md) | :heavy_minus_sign: | Specify the format that the model must output. By default it will use `{ "type": "text" }`. Setting to `{ "type": "json_object" }` enables JSON mode, which guarantees the message the model generates is in JSON. When using JSON mode you MUST also instruct the model to produce JSON yourself with a system or a user message. Setting to `{ "type": "json_schema" }` enables JSON schema mode, which guarantees the message the model generates is in JSON and follows the schema you provide. | **Example 1:** {
"type": "text"
}
**Example 2:** {
"type": "json_object"
}
**Example 3:** {
"type": "json_schema",
"json_schema": {
"schema": {
"properties": {
"name": {
"title": "Name",
"type": "string"
},
"authors": {
"items": {
"type": "string"
},
"title": "Authors",
"type": "array"
}
},
"required": [
"name",
"authors"
],
"title": "Book",
"type": "object",
"additionalProperties": false
},
"name": "book",
"strict": true
}
} | | `tools` | List[[models.Tool](../../models/tool.md)] | :heavy_minus_sign: | A list of tools the model may call. Use this to provide a list of functions the model may generate JSON inputs for. | | | `tool_choice` | [Optional[models.ChatCompletionRequestToolChoice]](../../models/chatcompletionrequesttoolchoice.md) | :heavy_minus_sign: | Controls which (if any) tool is called by the model. `none` means the model will not call any tool and instead generates a message. `auto` means the model can pick between generating a message or calling one or more tools. `any` or `required` means the model must call one or more tools. Specifying a particular tool via `{"type": "function", "function": {"name": "my_function"}}` forces the model to call that tool. | | | `presence_penalty` | *Optional[float]* | :heavy_minus_sign: | The `presence_penalty` determines how much the model penalizes the repetition of words or phrases. A higher presence penalty encourages the model to use a wider variety of words and phrases, making the output more diverse and creative. | | @@ -72,8 +72,8 @@ with Mistral( | Error Type | Status Code | Content Type | | -------------------------- | -------------------------- | -------------------------- | -| models.HTTPValidationError | 422 | application/json | -| models.SDKError | 4XX, 5XX | \*/\* | +| errors.HTTPValidationError | 422 | application/json | +| errors.SDKError | 4XX, 5XX | \*/\* | ## stream @@ -93,8 +93,8 @@ with Mistral( res = mistral.chat.stream(model="mistral-large-latest", messages=[ { - "content": "Who is the best French painter? Answer in one short sentence.", "role": "user", + "content": "Who is the best French painter? Answer in one short sentence.", }, ], stream=True, response_format={ "type": "text", @@ -120,7 +120,7 @@ with Mistral( | `stop` | [Optional[models.ChatCompletionStreamRequestStop]](../../models/chatcompletionstreamrequeststop.md) | :heavy_minus_sign: | Stop generation if this token is detected. 
Or if one of these tokens is detected when providing an array | | | `random_seed` | *OptionalNullable[int]* | :heavy_minus_sign: | The seed to use for random sampling. If set, different calls will generate deterministic results. | | | `metadata` | Dict[str, *Any*] | :heavy_minus_sign: | N/A | | -| `response_format` | [Optional[models.ResponseFormat]](../../models/responseformat.md) | :heavy_minus_sign: | Specify the format that the model must output. By default it will use `{ "type": "text" }`. Setting to `{ "type": "json_object" }` enables JSON mode, which guarantees the message the model generates is in JSON. When using JSON mode you MUST also instruct the model to produce JSON yourself with a system or a user message. Setting to `{ "type": "json_schema" }` enables JSON schema mode, which guarantees the message the model generates is in JSON and follows the schema you provide. | {
"type": "text"
} | +| `response_format` | [Optional[models.ResponseFormat]](../../models/responseformat.md) | :heavy_minus_sign: | Specify the format that the model must output. By default it will use `{ "type": "text" }`. Setting to `{ "type": "json_object" }` enables JSON mode, which guarantees the message the model generates is in JSON. When using JSON mode you MUST also instruct the model to produce JSON yourself with a system or a user message. Setting to `{ "type": "json_schema" }` enables JSON schema mode, which guarantees the message the model generates is in JSON and follows the schema you provide. | **Example 1:** {
"type": "text"
}
**Example 2:** {
"type": "json_object"
}
**Example 3:** {
"type": "json_schema",
"json_schema": {
"schema": {
"properties": {
"name": {
"title": "Name",
"type": "string"
},
"authors": {
"items": {
"type": "string"
},
"title": "Authors",
"type": "array"
}
},
"required": [
"name",
"authors"
],
"title": "Book",
"type": "object",
"additionalProperties": false
},
"name": "book",
"strict": true
}
} | | `tools` | List[[models.Tool](../../models/tool.md)] | :heavy_minus_sign: | A list of tools the model may call. Use this to provide a list of functions the model may generate JSON inputs for. | | | `tool_choice` | [Optional[models.ChatCompletionStreamRequestToolChoice]](../../models/chatcompletionstreamrequesttoolchoice.md) | :heavy_minus_sign: | Controls which (if any) tool is called by the model. `none` means the model will not call any tool and instead generates a message. `auto` means the model can pick between generating a message or calling one or more tools. `any` or `required` means the model must call one or more tools. Specifying a particular tool via `{"type": "function", "function": {"name": "my_function"}}` forces the model to call that tool. | | | `presence_penalty` | *Optional[float]* | :heavy_minus_sign: | The `presence_penalty` determines how much the model penalizes the repetition of words or phrases. A higher presence penalty encourages the model to use a wider variety of words and phrases, making the output more diverse and creative. 
| | @@ -140,5 +140,5 @@ with Mistral( | Error Type | Status Code | Content Type | | -------------------------- | -------------------------- | -------------------------- | -| models.HTTPValidationError | 422 | application/json | -| models.SDKError | 4XX, 5XX | \*/\* | \ No newline at end of file +| errors.HTTPValidationError | 422 | application/json | +| errors.SDKError | 4XX, 5XX | \*/\* | \ No newline at end of file diff --git a/docs/sdks/classifiers/README.md b/docs/sdks/classifiers/README.md index 41b52081..dc0f4984 100644 --- a/docs/sdks/classifiers/README.md +++ b/docs/sdks/classifiers/README.md @@ -17,7 +17,7 @@ Moderations ### Example Usage - + ```python from mistralai.client import Mistral import os @@ -27,10 +27,7 @@ with Mistral( api_key=os.getenv("MISTRAL_API_KEY", ""), ) as mistral: - res = mistral.classifiers.moderate(model="Durango", inputs=[ - "", - "", - ]) + res = mistral.classifiers.moderate(model="mistral-moderation-latest", inputs="") # Handle response print(res) @@ -54,8 +51,8 @@ with Mistral( | Error Type | Status Code | Content Type | | -------------------------- | -------------------------- | -------------------------- | -| models.HTTPValidationError | 422 | application/json | -| models.SDKError | 4XX, 5XX | \*/\* | +| errors.HTTPValidationError | 422 | application/json | +| errors.SDKError | 4XX, 5XX | \*/\* | ## moderate_chat @@ -63,7 +60,7 @@ Chat Moderations ### Example Usage - + ```python from mistralai.client import Mistral import os @@ -75,8 +72,8 @@ with Mistral( res = mistral.classifiers.moderate_chat(inputs=[ { - "content": "", "role": "tool", + "content": "", }, ], model="LeBaron") @@ -101,8 +98,8 @@ with Mistral( | Error Type | Status Code | Content Type | | -------------------------- | -------------------------- | -------------------------- | -| models.HTTPValidationError | 422 | application/json | -| models.SDKError | 4XX, 5XX | \*/\* | +| errors.HTTPValidationError | 422 | application/json | +| errors.SDKError | 4XX, 5XX | 
\*/\* | ## classify @@ -146,8 +143,8 @@ with Mistral( | Error Type | Status Code | Content Type | | -------------------------- | -------------------------- | -------------------------- | -| models.HTTPValidationError | 422 | application/json | -| models.SDKError | 4XX, 5XX | \*/\* | +| errors.HTTPValidationError | 422 | application/json | +| errors.SDKError | 4XX, 5XX | \*/\* | ## classify_chat @@ -165,12 +162,12 @@ with Mistral( api_key=os.getenv("MISTRAL_API_KEY", ""), ) as mistral: - res = mistral.classifiers.classify_chat(model="Camry", inputs=[ + res = mistral.classifiers.classify_chat(model="Camry", input=[ { "messages": [ { - "content": "", "role": "system", + "content": "", }, ], }, @@ -186,7 +183,7 @@ with Mistral( | Parameter | Type | Required | Description | | ------------------------------------------------------------------- | ------------------------------------------------------------------- | ------------------------------------------------------------------- | ------------------------------------------------------------------- | | `model` | *str* | :heavy_check_mark: | N/A | -| `inputs` | [models.Inputs](../../models/inputs.md) | :heavy_check_mark: | Chat to classify | +| `input` | [models.Inputs](../../models/inputs.md) | :heavy_check_mark: | Chat to classify | | `retries` | [Optional[utils.RetryConfig]](../../models/utils/retryconfig.md) | :heavy_minus_sign: | Configuration to override the default retry behavior of the client. 
| ### Response @@ -197,5 +194,5 @@ with Mistral( | Error Type | Status Code | Content Type | | -------------------------- | -------------------------- | -------------------------- | -| models.HTTPValidationError | 422 | application/json | -| models.SDKError | 4XX, 5XX | \*/\* | \ No newline at end of file +| errors.HTTPValidationError | 422 | application/json | +| errors.SDKError | 4XX, 5XX | \*/\* | \ No newline at end of file diff --git a/docs/sdks/conversations/README.md b/docs/sdks/conversations/README.md index c0089f12..e77d329b 100644 --- a/docs/sdks/conversations/README.md +++ b/docs/sdks/conversations/README.md @@ -24,7 +24,7 @@ Create a new conversation, using a base model or an agent and append entries. Co ### Example Usage - + ```python from mistralai.client import Mistral import os @@ -72,8 +72,8 @@ with Mistral( | Error Type | Status Code | Content Type | | -------------------------- | -------------------------- | -------------------------- | -| models.HTTPValidationError | 422 | application/json | -| models.SDKError | 4XX, 5XX | \*/\* | +| errors.HTTPValidationError | 422 | application/json | +| errors.SDKError | 4XX, 5XX | \*/\* | ## list @@ -81,7 +81,7 @@ Retrieve a list of conversation entities sorted by creation time. ### Example Usage - + ```python from mistralai.client import Mistral import os @@ -109,14 +109,14 @@ with Mistral( ### Response -**[List[models.ListConversationsResponse]](../../models/.md)** +**[List[models.AgentsAPIV1ConversationsListResponse]](../../models/.md)** ### Errors | Error Type | Status Code | Content Type | | -------------------------- | -------------------------- | -------------------------- | -| models.HTTPValidationError | 422 | application/json | -| models.SDKError | 4XX, 5XX | \*/\* | +| errors.HTTPValidationError | 422 | application/json | +| errors.SDKError | 4XX, 5XX | \*/\* | ## get @@ -124,7 +124,7 @@ Given a conversation_id retrieve a conversation entity with its attributes. 
### Example Usage - + ```python from mistralai.client import Mistral import os @@ -156,8 +156,8 @@ with Mistral( | Error Type | Status Code | Content Type | | -------------------------- | -------------------------- | -------------------------- | -| models.HTTPValidationError | 422 | application/json | -| models.SDKError | 4XX, 5XX | \*/\* | +| errors.HTTPValidationError | 422 | application/json | +| errors.SDKError | 4XX, 5XX | \*/\* | ## delete @@ -165,7 +165,7 @@ Delete a conversation given a conversation_id. ### Example Usage - + ```python from mistralai.client import Mistral import os @@ -192,8 +192,8 @@ with Mistral( | Error Type | Status Code | Content Type | | -------------------------- | -------------------------- | -------------------------- | -| models.HTTPValidationError | 422 | application/json | -| models.SDKError | 4XX, 5XX | \*/\* | +| errors.HTTPValidationError | 422 | application/json | +| errors.SDKError | 4XX, 5XX | \*/\* | ## append @@ -201,7 +201,7 @@ Run completion on the history of the conversation and the user entries. 
Return t ### Example Usage - + ```python from mistralai.client import Mistral import os @@ -211,7 +211,7 @@ with Mistral( api_key=os.getenv("MISTRAL_API_KEY", ""), ) as mistral: - res = mistral.beta.conversations.append(conversation_id="", inputs=[], stream=False, store=True, handoff_execution="server", completion_args={ + res = mistral.beta.conversations.append(conversation_id="", stream=False, store=True, handoff_execution="server", completion_args={ "response_format": { "type": "text", }, @@ -227,11 +227,12 @@ with Mistral( | Parameter | Type | Required | Description | | ----------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------- | | `conversation_id` | *str* | :heavy_check_mark: | ID of the conversation to which we append entries. | -| `inputs` | [models.ConversationInputs](../../models/conversationinputs.md) | :heavy_check_mark: | N/A | +| `inputs` | [Optional[models.ConversationInputs]](../../models/conversationinputs.md) | :heavy_minus_sign: | N/A | | `stream` | *Optional[bool]* | :heavy_minus_sign: | N/A | | `store` | *Optional[bool]* | :heavy_minus_sign: | Whether to store the results into our servers or not. 
| | `handoff_execution` | [Optional[models.ConversationAppendRequestHandoffExecution]](../../models/conversationappendrequesthandoffexecution.md) | :heavy_minus_sign: | N/A | | `completion_args` | [Optional[models.CompletionArgs]](../../models/completionargs.md) | :heavy_minus_sign: | White-listed arguments from the completion API | +| `tool_confirmations` | List[[models.ToolCallConfirmation](../../models/toolcallconfirmation.md)] | :heavy_minus_sign: | N/A | | `retries` | [Optional[utils.RetryConfig]](../../models/utils/retryconfig.md) | :heavy_minus_sign: | Configuration to override the default retry behavior of the client. | ### Response @@ -242,8 +243,8 @@ with Mistral( | Error Type | Status Code | Content Type | | -------------------------- | -------------------------- | -------------------------- | -| models.HTTPValidationError | 422 | application/json | -| models.SDKError | 4XX, 5XX | \*/\* | +| errors.HTTPValidationError | 422 | application/json | +| errors.SDKError | 4XX, 5XX | \*/\* | ## get_history @@ -251,7 +252,7 @@ Given a conversation_id retrieve all the entries belonging to that conversation. 
### Example Usage - + ```python from mistralai.client import Mistral import os @@ -283,8 +284,8 @@ with Mistral( | Error Type | Status Code | Content Type | | -------------------------- | -------------------------- | -------------------------- | -| models.HTTPValidationError | 422 | application/json | -| models.SDKError | 4XX, 5XX | \*/\* | +| errors.HTTPValidationError | 422 | application/json | +| errors.SDKError | 4XX, 5XX | \*/\* | ## get_messages @@ -292,7 +293,7 @@ Given a conversation_id retrieve all the messages belonging to that conversation ### Example Usage - + ```python from mistralai.client import Mistral import os @@ -324,8 +325,8 @@ with Mistral( | Error Type | Status Code | Content Type | | -------------------------- | -------------------------- | -------------------------- | -| models.HTTPValidationError | 422 | application/json | -| models.SDKError | 4XX, 5XX | \*/\* | +| errors.HTTPValidationError | 422 | application/json | +| errors.SDKError | 4XX, 5XX | \*/\* | ## restart @@ -333,7 +334,7 @@ Given a conversation_id and an id, recreate a conversation from this point and r ### Example Usage - + ```python from mistralai.client import Mistral import os @@ -343,7 +344,7 @@ with Mistral( api_key=os.getenv("MISTRAL_API_KEY", ""), ) as mistral: - res = mistral.beta.conversations.restart(conversation_id="", inputs="", from_entry_id="", stream=False, store=True, handoff_execution="server", completion_args={ + res = mistral.beta.conversations.restart(conversation_id="", from_entry_id="", stream=False, store=True, handoff_execution="server", completion_args={ "response_format": { "type": "text", }, @@ -359,8 +360,8 @@ with Mistral( | Parameter | Type | Required | Description | | ------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------- | 
------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------- | | `conversation_id` | *str* | :heavy_check_mark: | ID of the original conversation which is being restarted. | -| `inputs` | [models.ConversationInputs](../../models/conversationinputs.md) | :heavy_check_mark: | N/A | | `from_entry_id` | *str* | :heavy_check_mark: | N/A | +| `inputs` | [Optional[models.ConversationInputs]](../../models/conversationinputs.md) | :heavy_minus_sign: | N/A | | `stream` | *Optional[bool]* | :heavy_minus_sign: | N/A | | `store` | *Optional[bool]* | :heavy_minus_sign: | Whether to store the results into our servers or not. | | `handoff_execution` | [Optional[models.ConversationRestartRequestHandoffExecution]](../../models/conversationrestartrequesthandoffexecution.md) | :heavy_minus_sign: | N/A | @@ -377,8 +378,8 @@ with Mistral( | Error Type | Status Code | Content Type | | -------------------------- | -------------------------- | -------------------------- | -| models.HTTPValidationError | 422 | application/json | -| models.SDKError | 4XX, 5XX | \*/\* | +| errors.HTTPValidationError | 422 | application/json | +| errors.SDKError | 4XX, 5XX | \*/\* | ## start_stream @@ -386,7 +387,7 @@ Create a new conversation, using a base model or an agent and append entries. 
Co ### Example Usage - + ```python from mistralai.client import Mistral import os @@ -396,7 +397,14 @@ with Mistral( api_key=os.getenv("MISTRAL_API_KEY", ""), ) as mistral: - res = mistral.beta.conversations.start_stream(inputs="", stream=True, completion_args={ + res = mistral.beta.conversations.start_stream(inputs=[ + { + "object": "entry", + "type": "function.result", + "tool_call_id": "", + "result": "", + }, + ], stream=True, completion_args={ "response_format": { "type": "text", }, @@ -436,8 +444,8 @@ with Mistral( | Error Type | Status Code | Content Type | | -------------------------- | -------------------------- | -------------------------- | -| models.HTTPValidationError | 422 | application/json | -| models.SDKError | 4XX, 5XX | \*/\* | +| errors.HTTPValidationError | 422 | application/json | +| errors.SDKError | 4XX, 5XX | \*/\* | ## append_stream @@ -445,7 +453,7 @@ Run completion on the history of the conversation and the user entries. Return t ### Example Usage - + ```python from mistralai.client import Mistral import os @@ -455,7 +463,7 @@ with Mistral( api_key=os.getenv("MISTRAL_API_KEY", ""), ) as mistral: - res = mistral.beta.conversations.append_stream(conversation_id="", inputs="", stream=True, store=True, handoff_execution="server", completion_args={ + res = mistral.beta.conversations.append_stream(conversation_id="", stream=True, store=True, handoff_execution="server", completion_args={ "response_format": { "type": "text", }, @@ -473,11 +481,12 @@ with Mistral( | Parameter | Type | Required | Description | | ----------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------- | 
----------------------------------------------------------------------------------------------------------------------------------- | | `conversation_id` | *str* | :heavy_check_mark: | ID of the conversation to which we append entries. | -| `inputs` | [models.ConversationInputs](../../models/conversationinputs.md) | :heavy_check_mark: | N/A | +| `inputs` | [Optional[models.ConversationInputs]](../../models/conversationinputs.md) | :heavy_minus_sign: | N/A | | `stream` | *Optional[bool]* | :heavy_minus_sign: | N/A | | `store` | *Optional[bool]* | :heavy_minus_sign: | Whether to store the results into our servers or not. | | `handoff_execution` | [Optional[models.ConversationAppendStreamRequestHandoffExecution]](../../models/conversationappendstreamrequesthandoffexecution.md) | :heavy_minus_sign: | N/A | | `completion_args` | [Optional[models.CompletionArgs]](../../models/completionargs.md) | :heavy_minus_sign: | White-listed arguments from the completion API | +| `tool_confirmations` | List[[models.ToolCallConfirmation](../../models/toolcallconfirmation.md)] | :heavy_minus_sign: | N/A | | `retries` | [Optional[utils.RetryConfig]](../../models/utils/retryconfig.md) | :heavy_minus_sign: | Configuration to override the default retry behavior of the client. 
| ### Response @@ -488,8 +497,8 @@ with Mistral( | Error Type | Status Code | Content Type | | -------------------------- | -------------------------- | -------------------------- | -| models.HTTPValidationError | 422 | application/json | -| models.SDKError | 4XX, 5XX | \*/\* | +| errors.HTTPValidationError | 422 | application/json | +| errors.SDKError | 4XX, 5XX | \*/\* | ## restart_stream @@ -497,7 +506,7 @@ Given a conversation_id and an id, recreate a conversation from this point and r ### Example Usage - + ```python from mistralai.client import Mistral import os @@ -507,7 +516,7 @@ with Mistral( api_key=os.getenv("MISTRAL_API_KEY", ""), ) as mistral: - res = mistral.beta.conversations.restart_stream(conversation_id="", inputs="", from_entry_id="", stream=True, store=True, handoff_execution="server", completion_args={ + res = mistral.beta.conversations.restart_stream(conversation_id="", from_entry_id="", stream=True, store=True, handoff_execution="server", completion_args={ "response_format": { "type": "text", }, @@ -525,8 +534,8 @@ with Mistral( | Parameter | Type | Required | Description | | ------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------- | | `conversation_id` | *str* | :heavy_check_mark: | ID of the original conversation which is being restarted. 
| -| `inputs` | [models.ConversationInputs](../../models/conversationinputs.md) | :heavy_check_mark: | N/A | | `from_entry_id` | *str* | :heavy_check_mark: | N/A | +| `inputs` | [Optional[models.ConversationInputs]](../../models/conversationinputs.md) | :heavy_minus_sign: | N/A | | `stream` | *Optional[bool]* | :heavy_minus_sign: | N/A | | `store` | *Optional[bool]* | :heavy_minus_sign: | Whether to store the results into our servers or not. | | `handoff_execution` | [Optional[models.ConversationRestartStreamRequestHandoffExecution]](../../models/conversationrestartstreamrequesthandoffexecution.md) | :heavy_minus_sign: | N/A | @@ -543,5 +552,5 @@ with Mistral( | Error Type | Status Code | Content Type | | -------------------------- | -------------------------- | -------------------------- | -| models.HTTPValidationError | 422 | application/json | -| models.SDKError | 4XX, 5XX | \*/\* | \ No newline at end of file +| errors.HTTPValidationError | 422 | application/json | +| errors.SDKError | 4XX, 5XX | \*/\* | \ No newline at end of file diff --git a/docs/sdks/documents/README.md b/docs/sdks/documents/README.md index 97831f86..9c219b67 100644 --- a/docs/sdks/documents/README.md +++ b/docs/sdks/documents/README.md @@ -23,7 +23,7 @@ Given a library, lists the document that have been uploaded to that library. 
### Example Usage - + ```python from mistralai.client import Mistral import os @@ -33,7 +33,7 @@ with Mistral( api_key=os.getenv("MISTRAL_API_KEY", ""), ) as mistral: - res = mistral.beta.libraries.documents.list(library_id="05e1bda5-99b1-4baf-bb03-905d8e094f74", page_size=100, page=0, sort_by="created_at", sort_order="desc") + res = mistral.beta.libraries.documents.list(library_id="5c3ca4cd-62bc-4c71-ad8a-1531ae80d078", page_size=100, page=0, sort_by="created_at", sort_order="desc") # Handle response print(res) @@ -55,14 +55,14 @@ with Mistral( ### Response -**[models.ListDocumentOut](../../models/listdocumentout.md)** +**[models.ListDocumentsResponse](../../models/listdocumentsresponse.md)** ### Errors | Error Type | Status Code | Content Type | | -------------------------- | -------------------------- | -------------------------- | -| models.HTTPValidationError | 422 | application/json | -| models.SDKError | 4XX, 5XX | \*/\* | +| errors.HTTPValidationError | 422 | application/json | +| errors.SDKError | 4XX, 5XX | \*/\* | ## upload @@ -70,7 +70,7 @@ Given a library, upload a new document to that library. 
It is queued for process ### Example Usage - + ```python from mistralai.client import Mistral import os @@ -80,7 +80,7 @@ with Mistral( api_key=os.getenv("MISTRAL_API_KEY", ""), ) as mistral: - res = mistral.beta.libraries.documents.upload(library_id="f973c54e-979a-4464-9d36-8cc31beb21fe", file={ + res = mistral.beta.libraries.documents.upload(library_id="a02150d9-5ee0-4877-b62c-28b1fcdf3b76", file={ "file_name": "example.file", "content": open("example.file", "rb"), }) @@ -100,14 +100,14 @@ with Mistral( ### Response -**[models.DocumentOut](../../models/documentout.md)** +**[models.Document](../../models/document.md)** ### Errors | Error Type | Status Code | Content Type | | -------------------------- | -------------------------- | -------------------------- | -| models.HTTPValidationError | 422 | application/json | -| models.SDKError | 4XX, 5XX | \*/\* | +| errors.HTTPValidationError | 422 | application/json | +| errors.SDKError | 4XX, 5XX | \*/\* | ## get @@ -115,7 +115,7 @@ Given a library and a document in this library, you can retrieve the metadata of ### Example Usage - + ```python from mistralai.client import Mistral import os @@ -125,7 +125,7 @@ with Mistral( api_key=os.getenv("MISTRAL_API_KEY", ""), ) as mistral: - res = mistral.beta.libraries.documents.get(library_id="f9902d0a-1ea4-4953-be48-52df6edd302a", document_id="c3e12fd9-e840-46f2-8d4a-79985ed36d24") + res = mistral.beta.libraries.documents.get(library_id="03d908c8-90a1-44fd-bf3a-8490fb7c9a03", document_id="90973aec-0508-4375-8b00-91d732414745") # Handle response print(res) @@ -142,14 +142,14 @@ with Mistral( ### Response -**[models.DocumentOut](../../models/documentout.md)** +**[models.Document](../../models/document.md)** ### Errors | Error Type | Status Code | Content Type | | -------------------------- | -------------------------- | -------------------------- | -| models.HTTPValidationError | 422 | application/json | -| models.SDKError | 4XX, 5XX | \*/\* | +| errors.HTTPValidationError | 422 | 
application/json | +| errors.SDKError | 4XX, 5XX | \*/\* | ## update @@ -157,7 +157,7 @@ Given a library and a document in that library, update the name of that document ### Example Usage - + ```python from mistralai.client import Mistral import os @@ -167,7 +167,7 @@ with Mistral( api_key=os.getenv("MISTRAL_API_KEY", ""), ) as mistral: - res = mistral.beta.libraries.documents.update(library_id="3b900c67-d2b6-4637-93f2-3eff2c85f8dd", document_id="66f935fd-37ec-441f-bca5-b1129befcbca") + res = mistral.beta.libraries.documents.update(library_id="3ddd8d93-dca5-4a6d-980d-173226c35742", document_id="2a25e44c-b160-40ca-b5c2-b65fb2fcae34") # Handle response print(res) @@ -186,14 +186,14 @@ with Mistral( ### Response -**[models.DocumentOut](../../models/documentout.md)** +**[models.Document](../../models/document.md)** ### Errors | Error Type | Status Code | Content Type | | -------------------------- | -------------------------- | -------------------------- | -| models.HTTPValidationError | 422 | application/json | -| models.SDKError | 4XX, 5XX | \*/\* | +| errors.HTTPValidationError | 422 | application/json | +| errors.SDKError | 4XX, 5XX | \*/\* | ## delete @@ -201,7 +201,7 @@ Given a library and a document in that library, delete that document. The docume ### Example Usage - + ```python from mistralai.client import Mistral import os @@ -211,7 +211,7 @@ with Mistral( api_key=os.getenv("MISTRAL_API_KEY", ""), ) as mistral: - mistral.beta.libraries.documents.delete(library_id="c728d742-7845-462b-84ad-2aacbaf1c7cf", document_id="ed3f5797-846a-4abe-8e30-39b2fd2323e0") + mistral.beta.libraries.documents.delete(library_id="005daae9-d42e-407d-82d7-2261c6a1496c", document_id="edc236b0-baff-49a9-884b-4ca36a258da4") # Use the SDK ... 
@@ -229,8 +229,8 @@ with Mistral( | Error Type | Status Code | Content Type | | -------------------------- | -------------------------- | -------------------------- | -| models.HTTPValidationError | 422 | application/json | -| models.SDKError | 4XX, 5XX | \*/\* | +| errors.HTTPValidationError | 422 | application/json | +| errors.SDKError | 4XX, 5XX | \*/\* | ## text_content @@ -238,7 +238,7 @@ Given a library and a document in that library, you can retrieve the text conten ### Example Usage - + ```python from mistralai.client import Mistral import os @@ -248,7 +248,7 @@ with Mistral( api_key=os.getenv("MISTRAL_API_KEY", ""), ) as mistral: - res = mistral.beta.libraries.documents.text_content(library_id="12689dc1-50df-4a0d-8202-2757f7a8c141", document_id="9d4057e9-d112-437c-911e-6ee031389739") + res = mistral.beta.libraries.documents.text_content(library_id="1d177215-3b6b-45ba-9fa9-baf773223bec", document_id="60214c91-2aba-4692-a4e6-a53365de8caf") # Handle response print(res) @@ -271,8 +271,8 @@ with Mistral( | Error Type | Status Code | Content Type | | -------------------------- | -------------------------- | -------------------------- | -| models.HTTPValidationError | 422 | application/json | -| models.SDKError | 4XX, 5XX | \*/\* | +| errors.HTTPValidationError | 422 | application/json | +| errors.SDKError | 4XX, 5XX | \*/\* | ## status @@ -280,7 +280,7 @@ Given a library and a document in that library, retrieve the processing status o ### Example Usage - + ```python from mistralai.client import Mistral import os @@ -290,7 +290,7 @@ with Mistral( api_key=os.getenv("MISTRAL_API_KEY", ""), ) as mistral: - res = mistral.beta.libraries.documents.status(library_id="41bb33c4-7e53-453d-bf21-398bb2862772", document_id="416b95cf-19c8-45af-84be-26aaa3ab3666") + res = mistral.beta.libraries.documents.status(library_id="e6906f70-368f-4155-80da-c1718f01bc43", document_id="2c904915-d831-4e9d-a345-8ce405bcef66") # Handle response print(res) @@ -313,8 +313,8 @@ with Mistral( | 
Error Type | Status Code | Content Type | | -------------------------- | -------------------------- | -------------------------- | -| models.HTTPValidationError | 422 | application/json | -| models.SDKError | 4XX, 5XX | \*/\* | +| errors.HTTPValidationError | 422 | application/json | +| errors.SDKError | 4XX, 5XX | \*/\* | ## get_signed_url @@ -322,7 +322,7 @@ Given a library and a document in that library, retrieve the signed URL of a spe ### Example Usage - + ```python from mistralai.client import Mistral import os @@ -332,7 +332,7 @@ with Mistral( api_key=os.getenv("MISTRAL_API_KEY", ""), ) as mistral: - res = mistral.beta.libraries.documents.get_signed_url(library_id="2dbbe172-1374-41be-b03d-a088c733612e", document_id="b5d88764-47f1-4485-9df1-658775428344") + res = mistral.beta.libraries.documents.get_signed_url(library_id="23cf6904-a602-4ee8-9f5b-8efc557c336d", document_id="48598486-df71-4994-acbb-1133c72efa8c") # Handle response print(res) @@ -349,14 +349,14 @@ with Mistral( ### Response -**[str](../../models/.md)** +**[str](../../models/responselibrariesdocumentsgetsignedurlv1.md)** ### Errors | Error Type | Status Code | Content Type | | -------------------------- | -------------------------- | -------------------------- | -| models.HTTPValidationError | 422 | application/json | -| models.SDKError | 4XX, 5XX | \*/\* | +| errors.HTTPValidationError | 422 | application/json | +| errors.SDKError | 4XX, 5XX | \*/\* | ## extracted_text_signed_url @@ -364,7 +364,7 @@ Given a library and a document in that library, retrieve the signed URL of text ### Example Usage - + ```python from mistralai.client import Mistral import os @@ -374,7 +374,7 @@ with Mistral( api_key=os.getenv("MISTRAL_API_KEY", ""), ) as mistral: - res = mistral.beta.libraries.documents.extracted_text_signed_url(library_id="46d040ce-ae2e-4891-a54c-cdab6a8f62d8", document_id="3eddbfe2-3fd7-47f5-984b-b378e6950e37") + res = 
mistral.beta.libraries.documents.extracted_text_signed_url(library_id="a6f15de3-1e82-4f95-af82-851499042ef8", document_id="9749d4f9-24e5-4ca2-99a3-a406863f805d") # Handle response print(res) @@ -391,14 +391,14 @@ with Mistral( ### Response -**[str](../../models/.md)** +**[str](../../models/responselibrariesdocumentsgetextractedtextsignedurlv1.md)** ### Errors | Error Type | Status Code | Content Type | | -------------------------- | -------------------------- | -------------------------- | -| models.HTTPValidationError | 422 | application/json | -| models.SDKError | 4XX, 5XX | \*/\* | +| errors.HTTPValidationError | 422 | application/json | +| errors.SDKError | 4XX, 5XX | \*/\* | ## reprocess @@ -406,7 +406,7 @@ Given a library and a document in that library, reprocess that document, it will ### Example Usage - + ```python from mistralai.client import Mistral import os @@ -416,7 +416,7 @@ with Mistral( api_key=os.getenv("MISTRAL_API_KEY", ""), ) as mistral: - mistral.beta.libraries.documents.reprocess(library_id="76d357e4-d891-40c6-9d1e-6d6ce5056ee0", document_id="09798d2b-8f46-46c6-9765-8054a82a4bb2") + mistral.beta.libraries.documents.reprocess(library_id="51b29371-de8f-4ba4-932b-a0bafb3a7f64", document_id="3052422c-49ca-45ac-a918-cadb35d61fd8") # Use the SDK ... 
@@ -434,5 +434,5 @@ with Mistral( | Error Type | Status Code | Content Type | | -------------------------- | -------------------------- | -------------------------- | -| models.HTTPValidationError | 422 | application/json | -| models.SDKError | 4XX, 5XX | \*/\* | \ No newline at end of file +| errors.HTTPValidationError | 422 | application/json | +| errors.SDKError | 4XX, 5XX | \*/\* | \ No newline at end of file diff --git a/docs/sdks/embeddings/README.md b/docs/sdks/embeddings/README.md index 0be7ea6d..eecb5c9e 100644 --- a/docs/sdks/embeddings/README.md +++ b/docs/sdks/embeddings/README.md @@ -14,7 +14,7 @@ Embeddings ### Example Usage - + ```python from mistralai.client import Mistral import os @@ -54,5 +54,5 @@ with Mistral( | Error Type | Status Code | Content Type | | -------------------------- | -------------------------- | -------------------------- | -| models.HTTPValidationError | 422 | application/json | -| models.SDKError | 4XX, 5XX | \*/\* | \ No newline at end of file +| errors.HTTPValidationError | 422 | application/json | +| errors.SDKError | 4XX, 5XX | \*/\* | \ No newline at end of file diff --git a/docs/sdks/files/README.md b/docs/sdks/files/README.md index ae29b7bf..9507326b 100644 --- a/docs/sdks/files/README.md +++ b/docs/sdks/files/README.md @@ -23,7 +23,7 @@ Please contact us if you need to increase these storage limits. ### Example Usage - + ```python from mistralai.client import Mistral import os @@ -53,13 +53,13 @@ with Mistral( ### Response -**[models.UploadFileOut](../../models/uploadfileout.md)** +**[models.CreateFileResponse](../../models/createfileresponse.md)** ### Errors | Error Type | Status Code | Content Type | | --------------- | --------------- | --------------- | -| models.SDKError | 4XX, 5XX | \*/\* | +| errors.SDKError | 4XX, 5XX | \*/\* | ## list @@ -67,7 +67,7 @@ Returns a list of files that belong to the user's organization. 
### Example Usage - + ```python from mistralai.client import Mistral import os @@ -100,13 +100,13 @@ with Mistral( ### Response -**[models.ListFilesOut](../../models/listfilesout.md)** +**[models.ListFilesResponse](../../models/listfilesresponse.md)** ### Errors | Error Type | Status Code | Content Type | | --------------- | --------------- | --------------- | -| models.SDKError | 4XX, 5XX | \*/\* | +| errors.SDKError | 4XX, 5XX | \*/\* | ## retrieve @@ -114,7 +114,7 @@ Returns information about a specific file. ### Example Usage - + ```python from mistralai.client import Mistral import os @@ -124,7 +124,7 @@ with Mistral( api_key=os.getenv("MISTRAL_API_KEY", ""), ) as mistral: - res = mistral.files.retrieve(file_id="654a62d9-b7ee-49ac-835e-af4153e3c9ec") + res = mistral.files.retrieve(file_id="f2a27685-ca4e-4dc2-9f2b-88c422c3e0f6") # Handle response print(res) @@ -140,13 +140,13 @@ with Mistral( ### Response -**[models.RetrieveFileOut](../../models/retrievefileout.md)** +**[models.GetFileResponse](../../models/getfileresponse.md)** ### Errors | Error Type | Status Code | Content Type | | --------------- | --------------- | --------------- | -| models.SDKError | 4XX, 5XX | \*/\* | +| errors.SDKError | 4XX, 5XX | \*/\* | ## delete @@ -154,7 +154,7 @@ Delete a file. 
### Example Usage - + ```python from mistralai.client import Mistral import os @@ -164,7 +164,7 @@ with Mistral( api_key=os.getenv("MISTRAL_API_KEY", ""), ) as mistral: - res = mistral.files.delete(file_id="789c27a4-69de-47c6-b67f-cf6e56ce9f41") + res = mistral.files.delete(file_id="3b6d45eb-e30b-416f-8019-f47e2e93d930") # Handle response print(res) @@ -180,13 +180,13 @@ with Mistral( ### Response -**[models.DeleteFileOut](../../models/deletefileout.md)** +**[models.DeleteFileResponse](../../models/deletefileresponse.md)** ### Errors | Error Type | Status Code | Content Type | | --------------- | --------------- | --------------- | -| models.SDKError | 4XX, 5XX | \*/\* | +| errors.SDKError | 4XX, 5XX | \*/\* | ## download @@ -194,7 +194,7 @@ Download a file ### Example Usage - + ```python from mistralai.client import Mistral import os @@ -204,7 +204,7 @@ with Mistral( api_key=os.getenv("MISTRAL_API_KEY", ""), ) as mistral: - res = mistral.files.download(file_id="e2ba278e-eac9-4050-ae8e-ec433e124efb") + res = mistral.files.download(file_id="f8919994-a4a1-46b2-8b5b-06335a4300ce") # Handle response print(res) @@ -226,7 +226,7 @@ with Mistral( | Error Type | Status Code | Content Type | | --------------- | --------------- | --------------- | -| models.SDKError | 4XX, 5XX | \*/\* | +| errors.SDKError | 4XX, 5XX | \*/\* | ## get_signed_url @@ -234,7 +234,7 @@ Get Signed Url ### Example Usage - + ```python from mistralai.client import Mistral import os @@ -244,7 +244,7 @@ with Mistral( api_key=os.getenv("MISTRAL_API_KEY", ""), ) as mistral: - res = mistral.files.get_signed_url(file_id="7a0c108d-9e6b-4c47-990d-a20cba50b283", expiry=24) + res = mistral.files.get_signed_url(file_id="06a020ab-355c-49a6-b19d-304b7c01699f", expiry=24) # Handle response print(res) @@ -261,10 +261,10 @@ with Mistral( ### Response -**[models.FileSignedURL](../../models/filesignedurl.md)** +**[models.GetSignedURLResponse](../../models/getsignedurlresponse.md)** ### Errors | Error Type | Status Code 
| Content Type | | --------------- | --------------- | --------------- | -| models.SDKError | 4XX, 5XX | \*/\* | \ No newline at end of file +| errors.SDKError | 4XX, 5XX | \*/\* | \ No newline at end of file diff --git a/docs/sdks/fim/README.md b/docs/sdks/fim/README.md index 3c8c59c7..49151bf5 100644 --- a/docs/sdks/fim/README.md +++ b/docs/sdks/fim/README.md @@ -15,7 +15,7 @@ FIM completion. ### Example Usage - + ```python from mistralai.client import Mistral import os @@ -25,7 +25,7 @@ with Mistral( api_key=os.getenv("MISTRAL_API_KEY", ""), ) as mistral: - res = mistral.fim.complete(model="codestral-2405", prompt="def", top_p=1, stream=False, suffix="return a+b") + res = mistral.fim.complete(model="codestral-latest", prompt="def", top_p=1, stream=False, suffix="return a+b") # Handle response print(res) @@ -57,8 +57,8 @@ with Mistral( | Error Type | Status Code | Content Type | | -------------------------- | -------------------------- | -------------------------- | -| models.HTTPValidationError | 422 | application/json | -| models.SDKError | 4XX, 5XX | \*/\* | +| errors.HTTPValidationError | 422 | application/json | +| errors.SDKError | 4XX, 5XX | \*/\* | ## stream @@ -110,5 +110,5 @@ with Mistral( | Error Type | Status Code | Content Type | | -------------------------- | -------------------------- | -------------------------- | -| models.HTTPValidationError | 422 | application/json | -| models.SDKError | 4XX, 5XX | \*/\* | \ No newline at end of file +| errors.HTTPValidationError | 422 | application/json | +| errors.SDKError | 4XX, 5XX | \*/\* | \ No newline at end of file diff --git a/docs/sdks/finetuningjobs/README.md b/docs/sdks/finetuningjobs/README.md index fe18feeb..4262b3a9 100644 --- a/docs/sdks/finetuningjobs/README.md +++ b/docs/sdks/finetuningjobs/README.md @@ -16,7 +16,7 @@ Get a list of fine-tuning jobs for your organization and user. 
### Example Usage - + ```python from mistralai.client import Mistral import os @@ -35,29 +35,29 @@ with Mistral( ### Parameters -| Parameter | Type | Required | Description | -| -------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------- | -| `page` | *Optional[int]* | :heavy_minus_sign: | The page number of the results to be returned. | -| `page_size` | *Optional[int]* | :heavy_minus_sign: | The number of items to return per page. | -| `model` | *OptionalNullable[str]* | :heavy_minus_sign: | The model name used for fine-tuning to filter on. When set, the other results are not displayed. | -| `created_after` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_minus_sign: | The date/time to filter on. When set, the results for previous creation times are not displayed. | -| `created_before` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_minus_sign: | N/A | -| `created_by_me` | *Optional[bool]* | :heavy_minus_sign: | When set, only return results for jobs created by the API caller. Other results are not displayed. | -| `status` | [OptionalNullable[models.ListFineTuningJobsStatus]](../../models/listfinetuningjobsstatus.md) | :heavy_minus_sign: | The current job state to filter on. When set, the other results are not displayed. | -| `wandb_project` | *OptionalNullable[str]* | :heavy_minus_sign: | The Weights and Biases project to filter on. When set, the other results are not displayed. | -| `wandb_name` | *OptionalNullable[str]* | :heavy_minus_sign: | The Weight and Biases run name to filter on. When set, the other results are not displayed. 
| -| `suffix` | *OptionalNullable[str]* | :heavy_minus_sign: | The model suffix to filter on. When set, the other results are not displayed. | -| `retries` | [Optional[utils.RetryConfig]](../../models/utils/retryconfig.md) | :heavy_minus_sign: | Configuration to override the default retry behavior of the client. | +| Parameter | Type | Required | Description | +| ----------------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------- | +| `page` | *Optional[int]* | :heavy_minus_sign: | The page number of the results to be returned. | +| `page_size` | *Optional[int]* | :heavy_minus_sign: | The number of items to return per page. | +| `model` | *OptionalNullable[str]* | :heavy_minus_sign: | The model name used for fine-tuning to filter on. When set, the other results are not displayed. | +| `created_after` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_minus_sign: | The date/time to filter on. When set, the results for previous creation times are not displayed. | +| `created_before` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_minus_sign: | N/A | +| `created_by_me` | *Optional[bool]* | :heavy_minus_sign: | When set, only return results for jobs created by the API caller. Other results are not displayed. | +| `status` | [OptionalNullable[models.JobsAPIRoutesFineTuningGetFineTuningJobsStatus]](../../models/jobsapiroutesfinetuninggetfinetuningjobsstatus.md) | :heavy_minus_sign: | The current job state to filter on. 
When set, the other results are not displayed. | +| `wandb_project` | *OptionalNullable[str]* | :heavy_minus_sign: | The Weights and Biases project to filter on. When set, the other results are not displayed. | +| `wandb_name` | *OptionalNullable[str]* | :heavy_minus_sign: | The Weight and Biases run name to filter on. When set, the other results are not displayed. | +| `suffix` | *OptionalNullable[str]* | :heavy_minus_sign: | The model suffix to filter on. When set, the other results are not displayed. | +| `retries` | [Optional[utils.RetryConfig]](../../models/utils/retryconfig.md) | :heavy_minus_sign: | Configuration to override the default retry behavior of the client. | ### Response -**[models.JobsOut](../../models/jobsout.md)** +**[models.ListFineTuningJobsResponse](../../models/listfinetuningjobsresponse.md)** ### Errors | Error Type | Status Code | Content Type | | --------------- | --------------- | --------------- | -| models.SDKError | 4XX, 5XX | \*/\* | +| errors.SDKError | 4XX, 5XX | \*/\* | ## create @@ -65,7 +65,7 @@ Create a new fine-tuning job, it will be queued for processing. ### Example Usage - + ```python from mistralai.client import Mistral import os @@ -75,7 +75,7 @@ with Mistral( api_key=os.getenv("MISTRAL_API_KEY", ""), ) as mistral: - res = mistral.fine_tuning.jobs.create(model="Countach", hyperparameters={ + res = mistral.fine_tuning.jobs.create(model="Camaro", hyperparameters={ "learning_rate": 0.0001, }, invalid_sample_skip_percentage=0) @@ -93,23 +93,23 @@ with Mistral( | `training_files` | List[[models.TrainingFile](../../models/trainingfile.md)] | :heavy_minus_sign: | N/A | | `validation_files` | List[*str*] | :heavy_minus_sign: | A list containing the IDs of uploaded files that contain validation data. If you provide these files, the data is used to generate validation metrics periodically during fine-tuning. These metrics can be viewed in `checkpoints` when getting the status of a running fine-tuning job. 
The same data should not be present in both train and validation files. | | `suffix` | *OptionalNullable[str]* | :heavy_minus_sign: | A string that will be added to your fine-tuning model name. For example, a suffix of "my-great-model" would produce a model name like `ft:open-mistral-7b:my-great-model:xxx...` | -| `integrations` | List[[models.JobInIntegration](../../models/jobinintegration.md)] | :heavy_minus_sign: | A list of integrations to enable for your fine-tuning job. | +| `integrations` | List[[models.CreateFineTuningJobRequestIntegration](../../models/createfinetuningjobrequestintegration.md)] | :heavy_minus_sign: | A list of integrations to enable for your fine-tuning job. | | `auto_start` | *Optional[bool]* | :heavy_minus_sign: | This field will be required in a future release. | | `invalid_sample_skip_percentage` | *Optional[float]* | :heavy_minus_sign: | N/A | | `job_type` | [OptionalNullable[models.FineTuneableModelType]](../../models/finetuneablemodeltype.md) | :heavy_minus_sign: | N/A | -| `repositories` | List[[models.JobInRepository](../../models/jobinrepository.md)] | :heavy_minus_sign: | N/A | -| `classifier_targets` | List[[models.ClassifierTargetIn](../../models/classifiertargetin.md)] | :heavy_minus_sign: | N/A | +| `repositories` | List[[models.CreateFineTuningJobRequestRepository](../../models/createfinetuningjobrequestrepository.md)] | :heavy_minus_sign: | N/A | +| `classifier_targets` | List[[models.ClassifierTarget](../../models/classifiertarget.md)] | :heavy_minus_sign: | N/A | | `retries` | [Optional[utils.RetryConfig]](../../models/utils/retryconfig.md) | :heavy_minus_sign: | Configuration to override the default retry behavior of the client. 
| ### Response -**[models.CreateFineTuningJobResponse](../../models/createfinetuningjobresponse.md)** +**[models.JobsAPIRoutesFineTuningCreateFineTuningJobResponse](../../models/jobsapiroutesfinetuningcreatefinetuningjobresponse.md)** ### Errors | Error Type | Status Code | Content Type | | --------------- | --------------- | --------------- | -| models.SDKError | 4XX, 5XX | \*/\* | +| errors.SDKError | 4XX, 5XX | \*/\* | ## get @@ -117,7 +117,7 @@ Get a fine-tuned job details by its UUID. ### Example Usage - + ```python from mistralai.client import Mistral import os @@ -127,7 +127,7 @@ with Mistral( api_key=os.getenv("MISTRAL_API_KEY", ""), ) as mistral: - res = mistral.fine_tuning.jobs.get(job_id="2855f873-414e-4cf5-a46e-e589e39ee809") + res = mistral.fine_tuning.jobs.get(job_id="c167a961-ffca-4bcf-93ac-6169468dd389") # Handle response print(res) @@ -143,13 +143,13 @@ with Mistral( ### Response -**[models.GetFineTuningJobResponse](../../models/getfinetuningjobresponse.md)** +**[models.JobsAPIRoutesFineTuningGetFineTuningJobResponse](../../models/jobsapiroutesfinetuninggetfinetuningjobresponse.md)** ### Errors | Error Type | Status Code | Content Type | | --------------- | --------------- | --------------- | -| models.SDKError | 4XX, 5XX | \*/\* | +| errors.SDKError | 4XX, 5XX | \*/\* | ## cancel @@ -157,7 +157,7 @@ Request the cancellation of a fine tuning job. 
### Example Usage - + ```python from mistralai.client import Mistral import os @@ -167,7 +167,7 @@ with Mistral( api_key=os.getenv("MISTRAL_API_KEY", ""), ) as mistral: - res = mistral.fine_tuning.jobs.cancel(job_id="ee7d6f03-fcbb-43ca-8f17-0388c0832eb9") + res = mistral.fine_tuning.jobs.cancel(job_id="6188a2f6-7513-4e0f-89cc-3f8088523a49") # Handle response print(res) @@ -183,13 +183,13 @@ with Mistral( ### Response -**[models.CancelFineTuningJobResponse](../../models/cancelfinetuningjobresponse.md)** +**[models.JobsAPIRoutesFineTuningCancelFineTuningJobResponse](../../models/jobsapiroutesfinetuningcancelfinetuningjobresponse.md)** ### Errors | Error Type | Status Code | Content Type | | --------------- | --------------- | --------------- | -| models.SDKError | 4XX, 5XX | \*/\* | +| errors.SDKError | 4XX, 5XX | \*/\* | ## start @@ -197,7 +197,7 @@ Request the start of a validated fine tuning job. ### Example Usage - + ```python from mistralai.client import Mistral import os @@ -207,7 +207,7 @@ with Mistral( api_key=os.getenv("MISTRAL_API_KEY", ""), ) as mistral: - res = mistral.fine_tuning.jobs.start(job_id="da371429-0ec2-4cea-b9c7-73ce3a1dd76f") + res = mistral.fine_tuning.jobs.start(job_id="56553e4d-0679-471e-b9ac-59a77d671103") # Handle response print(res) @@ -223,10 +223,10 @@ with Mistral( ### Response -**[models.StartFineTuningJobResponse](../../models/startfinetuningjobresponse.md)** +**[models.JobsAPIRoutesFineTuningStartFineTuningJobResponse](../../models/jobsapiroutesfinetuningstartfinetuningjobresponse.md)** ### Errors | Error Type | Status Code | Content Type | | --------------- | --------------- | --------------- | -| models.SDKError | 4XX, 5XX | \*/\* | \ No newline at end of file +| errors.SDKError | 4XX, 5XX | \*/\* | \ No newline at end of file diff --git a/docs/sdks/libraries/README.md b/docs/sdks/libraries/README.md index 8835d0ec..7df1ef4e 100644 --- a/docs/sdks/libraries/README.md +++ b/docs/sdks/libraries/README.md @@ -18,7 +18,7 @@ List all 
libraries that you have created or have been shared with you. ### Example Usage - + ```python from mistralai.client import Mistral import os @@ -43,13 +43,13 @@ with Mistral( ### Response -**[models.ListLibraryOut](../../models/listlibraryout.md)** +**[models.ListLibrariesResponse](../../models/listlibrariesresponse.md)** ### Errors | Error Type | Status Code | Content Type | | --------------- | --------------- | --------------- | -| models.SDKError | 4XX, 5XX | \*/\* | +| errors.SDKError | 4XX, 5XX | \*/\* | ## create @@ -57,7 +57,7 @@ Create a new Library, you will be marked as the owner and only you will have the ### Example Usage - + ```python from mistralai.client import Mistral import os @@ -85,14 +85,14 @@ with Mistral( ### Response -**[models.LibraryOut](../../models/libraryout.md)** +**[models.Library](../../models/library.md)** ### Errors | Error Type | Status Code | Content Type | | -------------------------- | -------------------------- | -------------------------- | -| models.HTTPValidationError | 422 | application/json | -| models.SDKError | 4XX, 5XX | \*/\* | +| errors.HTTPValidationError | 422 | application/json | +| errors.SDKError | 4XX, 5XX | \*/\* | ## get @@ -100,7 +100,7 @@ Given a library id, details information about that Library. 
### Example Usage - + ```python from mistralai.client import Mistral import os @@ -110,7 +110,7 @@ with Mistral( api_key=os.getenv("MISTRAL_API_KEY", ""), ) as mistral: - res = mistral.beta.libraries.get(library_id="44e385d6-783e-4b21-8fae-5181e6817bc4") + res = mistral.beta.libraries.get(library_id="d0d23a1e-bfe5-45e7-b7bb-22a4ea78d47f") # Handle response print(res) @@ -126,14 +126,14 @@ with Mistral( ### Response -**[models.LibraryOut](../../models/libraryout.md)** +**[models.Library](../../models/library.md)** ### Errors | Error Type | Status Code | Content Type | | -------------------------- | -------------------------- | -------------------------- | -| models.HTTPValidationError | 422 | application/json | -| models.SDKError | 4XX, 5XX | \*/\* | +| errors.HTTPValidationError | 422 | application/json | +| errors.SDKError | 4XX, 5XX | \*/\* | ## delete @@ -141,7 +141,7 @@ Given a library id, deletes it together with all documents that have been upload ### Example Usage - + ```python from mistralai.client import Mistral import os @@ -151,7 +151,7 @@ with Mistral( api_key=os.getenv("MISTRAL_API_KEY", ""), ) as mistral: - res = mistral.beta.libraries.delete(library_id="441ba08a-3d1f-4700-8d6f-f32eeed49dff") + res = mistral.beta.libraries.delete(library_id="6cad0b6e-fd2e-4d11-a48b-21d30fb7c17a") # Handle response print(res) @@ -167,14 +167,14 @@ with Mistral( ### Response -**[models.LibraryOut](../../models/libraryout.md)** +**[models.Library](../../models/library.md)** ### Errors | Error Type | Status Code | Content Type | | -------------------------- | -------------------------- | -------------------------- | -| models.HTTPValidationError | 422 | application/json | -| models.SDKError | 4XX, 5XX | \*/\* | +| errors.HTTPValidationError | 422 | application/json | +| errors.SDKError | 4XX, 5XX | \*/\* | ## update @@ -182,7 +182,7 @@ Given a library id, you can update the name and description. 
### Example Usage - + ```python from mistralai.client import Mistral import os @@ -192,7 +192,7 @@ with Mistral( api_key=os.getenv("MISTRAL_API_KEY", ""), ) as mistral: - res = mistral.beta.libraries.update(library_id="27049553-3425-49ce-b965-fcb3a7ab03a3") + res = mistral.beta.libraries.update(library_id="e01880c3-d0b5-4a29-8b1b-abdb8ce917e4") # Handle response print(res) @@ -210,11 +210,11 @@ with Mistral( ### Response -**[models.LibraryOut](../../models/libraryout.md)** +**[models.Library](../../models/library.md)** ### Errors | Error Type | Status Code | Content Type | | -------------------------- | -------------------------- | -------------------------- | -| models.HTTPValidationError | 422 | application/json | -| models.SDKError | 4XX, 5XX | \*/\* | \ No newline at end of file +| errors.HTTPValidationError | 422 | application/json | +| errors.SDKError | 4XX, 5XX | \*/\* | \ No newline at end of file diff --git a/docs/sdks/models/README.md b/docs/sdks/models/README.md index 0cbf1bdd..311a2db6 100644 --- a/docs/sdks/models/README.md +++ b/docs/sdks/models/README.md @@ -19,7 +19,7 @@ List all models available to the user. ### Example Usage - + ```python from mistralai.client import Mistral import os @@ -50,7 +50,7 @@ with Mistral( | Error Type | Status Code | Content Type | | --------------- | --------------- | --------------- | -| models.SDKError | 4XX, 5XX | \*/\* | +| errors.SDKError | 4XX, 5XX | \*/\* | ## retrieve @@ -58,7 +58,7 @@ Retrieve information about a model. ### Example Usage - + ```python from mistralai.client import Mistral import os @@ -90,8 +90,8 @@ with Mistral( | Error Type | Status Code | Content Type | | -------------------------- | -------------------------- | -------------------------- | -| models.HTTPValidationError | 422 | application/json | -| models.SDKError | 4XX, 5XX | \*/\* | +| errors.HTTPValidationError | 422 | application/json | +| errors.SDKError | 4XX, 5XX | \*/\* | ## delete @@ -99,7 +99,7 @@ Delete a fine-tuned model. 
### Example Usage - + ```python from mistralai.client import Mistral import os @@ -131,8 +131,8 @@ with Mistral( | Error Type | Status Code | Content Type | | -------------------------- | -------------------------- | -------------------------- | -| models.HTTPValidationError | 422 | application/json | -| models.SDKError | 4XX, 5XX | \*/\* | +| errors.HTTPValidationError | 422 | application/json | +| errors.SDKError | 4XX, 5XX | \*/\* | ## update @@ -140,7 +140,7 @@ Update a model name or description. ### Example Usage - + ```python from mistralai.client import Mistral import os @@ -168,13 +168,13 @@ with Mistral( ### Response -**[models.UpdateModelResponse](../../models/updatemodelresponse.md)** +**[models.JobsAPIRoutesFineTuningUpdateFineTunedModelResponse](../../models/jobsapiroutesfinetuningupdatefinetunedmodelresponse.md)** ### Errors | Error Type | Status Code | Content Type | | --------------- | --------------- | --------------- | -| models.SDKError | 4XX, 5XX | \*/\* | +| errors.SDKError | 4XX, 5XX | \*/\* | ## archive @@ -182,7 +182,7 @@ Archive a fine-tuned model. ### Example Usage - + ```python from mistralai.client import Mistral import os @@ -208,13 +208,13 @@ with Mistral( ### Response -**[models.ArchiveFTModelOut](../../models/archiveftmodelout.md)** +**[models.ArchiveModelResponse](../../models/archivemodelresponse.md)** ### Errors | Error Type | Status Code | Content Type | | --------------- | --------------- | --------------- | -| models.SDKError | 4XX, 5XX | \*/\* | +| errors.SDKError | 4XX, 5XX | \*/\* | ## unarchive @@ -222,7 +222,7 @@ Un-archive a fine-tuned model. 
### Example Usage - + ```python from mistralai.client import Mistral import os @@ -248,10 +248,10 @@ with Mistral( ### Response -**[models.UnarchiveFTModelOut](../../models/unarchiveftmodelout.md)** +**[models.UnarchiveModelResponse](../../models/unarchivemodelresponse.md)** ### Errors | Error Type | Status Code | Content Type | | --------------- | --------------- | --------------- | -| models.SDKError | 4XX, 5XX | \*/\* | \ No newline at end of file +| errors.SDKError | 4XX, 5XX | \*/\* | \ No newline at end of file diff --git a/docs/sdks/ocr/README.md b/docs/sdks/ocr/README.md index 9fd9d6fc..fde2a823 100644 --- a/docs/sdks/ocr/README.md +++ b/docs/sdks/ocr/README.md @@ -14,7 +14,7 @@ OCR ### Example Usage - + ```python from mistralai.client import Mistral import os @@ -25,10 +25,8 @@ with Mistral( ) as mistral: res = mistral.ocr.process(model="CX-9", document={ - "image_url": { - "url": "https://measly-scrap.com", - }, - "type": "image_url", + "type": "document_url", + "document_url": "https://upset-labourer.net/", }, bbox_annotation_format={ "type": "text", }, document_annotation_format={ @@ -42,22 +40,22 @@ with Mistral( ### Parameters -| Parameter | Type | Required | Description | Example | -| ---------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------- | 
---------------------------------------------------------------------------------------------------------------------------------------------------------- | -| `model` | *Nullable[str]* | :heavy_check_mark: | N/A | | -| `document` | [models.Document](../../models/document.md) | :heavy_check_mark: | Document to run OCR on | | -| `id` | *Optional[str]* | :heavy_minus_sign: | N/A | | -| `pages` | List[*int*] | :heavy_minus_sign: | Specific pages user wants to process in various formats: single number, range, or list of both. Starts from 0 | | -| `include_image_base64` | *OptionalNullable[bool]* | :heavy_minus_sign: | Include image URLs in response | | -| `image_limit` | *OptionalNullable[int]* | :heavy_minus_sign: | Max images to extract | | -| `image_min_size` | *OptionalNullable[int]* | :heavy_minus_sign: | Minimum height and width of image to extract | | -| `bbox_annotation_format` | [OptionalNullable[models.ResponseFormat]](../../models/responseformat.md) | :heavy_minus_sign: | Structured output class for extracting useful information from each extracted bounding box / image from document. Only json_schema is valid for this field | {
"type": "text"
} | -| `document_annotation_format` | [OptionalNullable[models.ResponseFormat]](../../models/responseformat.md) | :heavy_minus_sign: | Structured output class for extracting useful information from the entire document. Only json_schema is valid for this field | {
"type": "text"
} | -| `document_annotation_prompt` | *OptionalNullable[str]* | :heavy_minus_sign: | Optional prompt to guide the model in extracting structured output from the entire document. A document_annotation_format must be provided. | | -| `table_format` | [OptionalNullable[models.TableFormat]](../../models/tableformat.md) | :heavy_minus_sign: | N/A | | -| `extract_header` | *Optional[bool]* | :heavy_minus_sign: | N/A | | -| `extract_footer` | *Optional[bool]* | :heavy_minus_sign: | N/A | | -| `retries` | [Optional[utils.RetryConfig]](../../models/utils/retryconfig.md) | :heavy_minus_sign: | Configuration to override the default retry behavior of the client. | | +| Parameter | Type | Required | Description | Example | +| ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | 
----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `model` | *Nullable[str]* | :heavy_check_mark: | N/A | | +| `document` | [models.DocumentUnion](../../models/documentunion.md) | :heavy_check_mark: | Document to run OCR on | | +| `id` | *Optional[str]* | :heavy_minus_sign: | N/A | | +| `pages` | List[*int*] | :heavy_minus_sign: | Specific pages user wants to process in various formats: single number, range, or list of both. 
Starts from 0 | | +| `include_image_base64` | *OptionalNullable[bool]* | :heavy_minus_sign: | Include image URLs in response | | +| `image_limit` | *OptionalNullable[int]* | :heavy_minus_sign: | Max images to extract | | +| `image_min_size` | *OptionalNullable[int]* | :heavy_minus_sign: | Minimum height and width of image to extract | | +| `bbox_annotation_format` | [OptionalNullable[models.ResponseFormat]](../../models/responseformat.md) | :heavy_minus_sign: | Structured output class for extracting useful information from each extracted bounding box / image from document. Only json_schema is valid for this field | **Example 1:** {
"type": "text"
}
**Example 2:** {
"type": "json_object"
}
**Example 3:** {
"type": "json_schema",
"json_schema": {
"schema": {
"properties": {
"name": {
"title": "Name",
"type": "string"
},
"authors": {
"items": {
"type": "string"
},
"title": "Authors",
"type": "array"
}
},
"required": [
"name",
"authors"
],
"title": "Book",
"type": "object",
"additionalProperties": false
},
"name": "book",
"strict": true
}
} | +| `document_annotation_format` | [OptionalNullable[models.ResponseFormat]](../../models/responseformat.md) | :heavy_minus_sign: | Structured output class for extracting useful information from the entire document. Only json_schema is valid for this field | **Example 1:** {
"type": "text"
}
**Example 2:** {
"type": "json_object"
}
**Example 3:** {
"type": "json_schema",
"json_schema": {
"schema": {
"properties": {
"name": {
"title": "Name",
"type": "string"
},
"authors": {
"items": {
"type": "string"
},
"title": "Authors",
"type": "array"
}
},
"required": [
"name",
"authors"
],
"title": "Book",
"type": "object",
"additionalProperties": false
},
"name": "book",
"strict": true
}
} | +| `document_annotation_prompt` | *OptionalNullable[str]* | :heavy_minus_sign: | Optional prompt to guide the model in extracting structured output from the entire document. A document_annotation_format must be provided. | | +| `table_format` | [OptionalNullable[models.TableFormat]](../../models/tableformat.md) | :heavy_minus_sign: | N/A | | +| `extract_header` | *Optional[bool]* | :heavy_minus_sign: | N/A | | +| `extract_footer` | *Optional[bool]* | :heavy_minus_sign: | N/A | | +| `retries` | [Optional[utils.RetryConfig]](../../models/utils/retryconfig.md) | :heavy_minus_sign: | Configuration to override the default retry behavior of the client. | | ### Response @@ -67,5 +65,5 @@ with Mistral( | Error Type | Status Code | Content Type | | -------------------------- | -------------------------- | -------------------------- | -| models.HTTPValidationError | 422 | application/json | -| models.SDKError | 4XX, 5XX | \*/\* | \ No newline at end of file +| errors.HTTPValidationError | 422 | application/json | +| errors.SDKError | 4XX, 5XX | \*/\* | \ No newline at end of file diff --git a/docs/sdks/transcriptions/README.md b/docs/sdks/transcriptions/README.md index 9691b81d..97703c9b 100644 --- a/docs/sdks/transcriptions/README.md +++ b/docs/sdks/transcriptions/README.md @@ -15,7 +15,7 @@ Create Transcription ### Example Usage - + ```python from mistralai.client import Mistral import os @@ -25,7 +25,7 @@ with Mistral( api_key=os.getenv("MISTRAL_API_KEY", ""), ) as mistral: - res = mistral.audio.transcriptions.complete(model="Model X", diarize=False) + res = mistral.audio.transcriptions.complete(model="voxtral-mini-latest", diarize=False) # Handle response print(res) @@ -36,7 +36,7 @@ with Mistral( | Parameter | Type | Required | Description | Example | | ---------------------------------------------------------------------------- | ---------------------------------------------------------------------------- | 
---------------------------------------------------------------------------- | ---------------------------------------------------------------------------- | ---------------------------------------------------------------------------- | -| `model` | *str* | :heavy_check_mark: | ID of the model to be used. | voxtral-mini-latest | +| `model` | *str* | :heavy_check_mark: | ID of the model to be used. | **Example 1:** voxtral-mini-latest
**Example 2:** voxtral-mini-2507 | | `file` | [Optional[models.File]](../../models/file.md) | :heavy_minus_sign: | N/A | | | `file_url` | *OptionalNullable[str]* | :heavy_minus_sign: | Url of a file to be transcribed | | | `file_id` | *OptionalNullable[str]* | :heavy_minus_sign: | ID of a file uploaded to /v1/files | | @@ -55,7 +55,7 @@ with Mistral( | Error Type | Status Code | Content Type | | --------------- | --------------- | --------------- | -| models.SDKError | 4XX, 5XX | \*/\* | +| errors.SDKError | 4XX, 5XX | \*/\* | ## stream @@ -105,4 +105,4 @@ with Mistral( | Error Type | Status Code | Content Type | | --------------- | --------------- | --------------- | -| models.SDKError | 4XX, 5XX | \*/\* | \ No newline at end of file +| errors.SDKError | 4XX, 5XX | \*/\* | \ No newline at end of file diff --git a/examples/mistral/audio/chat_streaming.py b/examples/mistral/audio/chat_streaming.py index a9ab2323..b418ef57 100755 --- a/examples/mistral/audio/chat_streaming.py +++ b/examples/mistral/audio/chat_streaming.py @@ -2,7 +2,8 @@ import os -from mistralai.client import Mistral, File +from mistralai.client import Mistral +from mistralai.client.models import File from mistralai.client.models import UserMessage diff --git a/examples/mistral/audio/transcription_async.py b/examples/mistral/audio/transcription_async.py index c8fd9ae6..f04f397e 100644 --- a/examples/mistral/audio/transcription_async.py +++ b/examples/mistral/audio/transcription_async.py @@ -2,7 +2,8 @@ import os import asyncio -from mistralai.client import Mistral, File +from mistralai.client import Mistral +from mistralai.client.models import File async def main(): diff --git a/examples/mistral/audio/transcription_diarize_async.py b/examples/mistral/audio/transcription_diarize_async.py index cbdf3512..4b511c87 100644 --- a/examples/mistral/audio/transcription_diarize_async.py +++ b/examples/mistral/audio/transcription_diarize_async.py @@ -3,7 +3,8 @@ import os import asyncio import pathlib -from 
mistralai.client import Mistral, File +from mistralai.client import Mistral +from mistralai.client.models import File fixture_dir = pathlib.Path(__file__).parents[2] / "fixtures" diff --git a/examples/mistral/audio/transcription_stream_async.py b/examples/mistral/audio/transcription_stream_async.py index 6e64dcf7..3055f3de 100644 --- a/examples/mistral/audio/transcription_stream_async.py +++ b/examples/mistral/audio/transcription_stream_async.py @@ -2,7 +2,8 @@ import asyncio import os -from mistralai.client import Mistral, File +from mistralai.client import Mistral +from mistralai.client.models import File async def main(): diff --git a/examples/mistral/classifier/async_classifier.py b/examples/mistral/classifier/async_classifier.py index 45cc14fa..881f6a69 100644 --- a/examples/mistral/classifier/async_classifier.py +++ b/examples/mistral/classifier/async_classifier.py @@ -2,8 +2,8 @@ from pprint import pprint import asyncio -from mistralai.client import Mistral, TrainingFile, ClassifierTrainingParametersIn -from mistralai.client.models import ClassifierJobOut +from mistralai.client import Mistral +from mistralai.client.models import ClassifierFineTuningJob, ClassifierFineTuningJobDetails, ClassifierTrainingParameters, TrainingFile import os @@ -36,12 +36,12 @@ async def train_classifier(client: Mistral, training_file_ids: list[str]) -> str TrainingFile(file_id=training_file_id) for training_file_id in training_file_ids ], - hyperparameters=ClassifierTrainingParametersIn( + hyperparameters=ClassifierTrainingParameters( learning_rate=0.0001, ), auto_start=True, ) - if not isinstance(job, ClassifierJobOut): + if not isinstance(job, ClassifierFineTuningJob): print("Unexpected job type returned") return None @@ -51,6 +51,8 @@ async def train_classifier(client: Mistral, training_file_ids: list[str]) -> str while True: await asyncio.sleep(10) detailed_job = await client.fine_tuning.jobs.get_async(job_id=job.id) + if not isinstance(detailed_job, 
ClassifierFineTuningJobDetails): + raise Exception(f"Unexpected job type: {type(detailed_job)}") if detailed_job.status not in [ "QUEUED", "STARTED", diff --git a/examples/mistral/jobs/async_batch_job_chat_completion_inline.py b/examples/mistral/jobs/async_batch_job_chat_completion_inline.py index 8b4cedd3..d2a1679f 100644 --- a/examples/mistral/jobs/async_batch_job_chat_completion_inline.py +++ b/examples/mistral/jobs/async_batch_job_chat_completion_inline.py @@ -1,4 +1,5 @@ -from mistralai.client import Mistral, BatchRequest, UserMessage +from mistralai.client import Mistral +from mistralai.client.models import BatchRequest, UserMessage import os import asyncio diff --git a/packages/azure/.speakeasy/gen.lock b/packages/azure/.speakeasy/gen.lock index 5cf1d8e1..c795c61c 100644 --- a/packages/azure/.speakeasy/gen.lock +++ b/packages/azure/.speakeasy/gen.lock @@ -3,46 +3,46 @@ id: dc40fa48-2c4d-46ad-ac8b-270749770f34 management: docChecksum: 571037b8485712afcef86703debb7f15 docVersion: 1.0.0 - speakeasyVersion: 1.685.0 - generationVersion: 2.794.1 - releaseVersion: 2.0.0a4 - configChecksum: 549cf1eae199d39bf97052462fd8e640 + speakeasyVersion: 1.729.0 + generationVersion: 2.841.0 + releaseVersion: 2.0.0-a4.1 + configChecksum: e2523ba89eba35872d05ddb673dd862a repoURL: https://github.com/mistralai/client-python.git repoSubDirectory: packages/azure installationURL: https://github.com/mistralai/client-python.git#subdirectory=packages/azure published: true persistentEdits: - generation_id: b0dbfbbb-4028-4834-9980-a1d2dba52a8d - pristine_commit_hash: 6cab3cf0757d3c7dd58ee1eabec66dd63a8c9a03 - pristine_tree_hash: abf5c6e4b603142b1a6aac936d7c3be574611256 + generation_id: 1812b54a-0aa7-4b43-8c53-d70427856543 + pristine_commit_hash: 28db2945de995b5707dc7f310b5291435aaafcbf + pristine_tree_hash: b01973b36166a61d38fa84cf7dae49b7a74e1402 features: python: additionalDependencies: 1.0.0 additionalProperties: 1.0.1 configurableModuleName: 0.2.0 - constsAndDefaults: 1.0.5 - core: 
5.23.18 + constsAndDefaults: 1.0.7 + core: 6.0.12 defaultEnabledRetries: 0.2.0 enumUnions: 0.1.0 envVarSecurityUsage: 0.3.2 examples: 3.0.2 flatRequests: 1.0.1 - globalSecurity: 3.0.4 + globalSecurity: 3.0.5 globalSecurityCallbacks: 1.0.0 globalSecurityFlattening: 1.0.0 globalServerURLs: 3.2.0 includes: 3.0.0 methodArguments: 1.0.2 - nameOverrides: 3.0.1 - nullables: 1.0.1 - openEnums: 1.0.2 - responseFormat: 1.0.1 - retries: 3.0.3 - sdkHooks: 1.2.0 - serverEvents: 1.0.11 + nameOverrides: 3.0.3 + nullables: 1.0.2 + openEnums: 1.0.4 + responseFormat: 1.1.0 + retries: 3.0.4 + sdkHooks: 1.2.1 + serverEvents: 1.0.13 serverEventsSentinels: 0.1.0 serverIDs: 3.0.0 - unions: 3.1.1 + unions: 3.1.4 trackedFiles: .gitattributes: id: 24139dae6567 @@ -52,6 +52,10 @@ trackedFiles: id: 89aa447020cd last_write_checksum: sha1:f84632c81029fcdda8c3b0c768d02b836fc80526 pristine_git_object: 8d79f0abb72526f1fb34a4c03e5bba612c6ba2ae + docs/errors/httpvalidationerror.md: + id: 7fe2e5327e07 + last_write_checksum: sha1:277a46811144643262651853dc6176d21b33573e + pristine_git_object: 712a148c3e2305dca4c702851865f9f8c8e674cc docs/models/arguments.md: id: 7ea5e33709a7 last_write_checksum: sha1:09eea126210d7fd0353e60a76bf1dbed173f13ec @@ -74,8 +78,8 @@ trackedFiles: pristine_git_object: b2f15ecbe88328de95b4961ddb3940fd8a6ee64b docs/models/chatcompletionrequest.md: id: adffe90369d0 - last_write_checksum: sha1:a404d37c6605a5524f1f48b418bacf46e86a9a68 - pristine_git_object: 3b0f7270840e257475f4b0f15f27e0c0152818d2 + last_write_checksum: sha1:00453565d70739471a4e1872c93b5b7e66fe6cb6 + pristine_git_object: f8715cd0a335c6dc0fda4b60400f11c4aa8a0a06 docs/models/chatcompletionrequestmessage.md: id: 3f5e170d418c last_write_checksum: sha1:7921c5a508a9f88adc01caab34e26182b8035607 @@ -94,8 +98,8 @@ trackedFiles: pristine_git_object: a0465ffbfc5558628953e03fbc53b80bbdc8649b docs/models/chatcompletionstreamrequest.md: id: cf8f29558a68 - last_write_checksum: sha1:daca00885f0d0f9863d8420bbee514723084813d - 
pristine_git_object: f78156a647ec63ca60ff423acbdee2b2404e4e60 + last_write_checksum: sha1:7233a19b12f3204b8e2259a4a09d0d9726609e4e + pristine_git_object: cc82a8c707268084865f86d71be82de5ebf6f821 docs/models/chatcompletionstreamrequestmessage.md: id: 053a98476cd2 last_write_checksum: sha1:8270692463fab1243d9de4bbef7162daa64e52c5 @@ -164,10 +168,6 @@ trackedFiles: id: 4b3bd62c0f26 last_write_checksum: sha1:754fe32bdffe53c1057b302702f5516f4e551cfb pristine_git_object: 87d7b4852de629015166605b273deb9341202dc0 - docs/models/httpvalidationerror.md: - id: a211c095f2ac - last_write_checksum: sha1:277a46811144643262651853dc6176d21b33573e - pristine_git_object: 712a148c3e2305dca4c702851865f9f8c8e674cc docs/models/imagedetail.md: id: f8217529b496 last_write_checksum: sha1:fdf19ac9459f64616240955cb81a84ef03e775c8 @@ -210,8 +210,8 @@ trackedFiles: pristine_git_object: 02473d44f73485fd7b7f0031d51bfac835d4036e docs/models/ocrrequest.md: id: 6862a3fc2d0f - last_write_checksum: sha1:9311e2c87f8f4512c35a717d3b063f2861f878d4 - pristine_git_object: 87929e53f8a74823b82ecce56d15f22228134fa6 + last_write_checksum: sha1:eefa8ad80773e00ac297f3cf806704ac6ac3557d + pristine_git_object: 2d26c19fd1cecb234d7fb761dd73cc0a59e622ad docs/models/ocrresponse.md: id: 30042328fb78 last_write_checksum: sha1:8e4a4ae404ea752f3e9f1108c2a5f89ed6cfb143 @@ -326,8 +326,8 @@ trackedFiles: pristine_git_object: 3e38f1a929f7d6b1d6de74604aa87e3d8f010544 pylintrc: id: 7ce8b9f946e6 - last_write_checksum: sha1:6b615d49741eb9ae16375d3a499767783d1128a1 - pristine_git_object: a8fcb932ba2a01c5e96e3b04c59371e930b75558 + last_write_checksum: sha1:8f871a5aac4b10bff724c9d91b8d7496eb1fbdde + pristine_git_object: 0391ac11bdc5526b697b69d047d568a611ce87d0 scripts/prepare_readme.py: id: e0c5957a6035 last_write_checksum: sha1:26b29aad3c23a98912fd881698c976aac55749fe @@ -338,8 +338,8 @@ trackedFiles: pristine_git_object: c35748f360329c2bc370e9b189f49b1a360b2c48 src/mistralai/azure/client/__init__.py: id: 5624bda9196d - 
last_write_checksum: sha1:36306d1d404b6aeb912d27f1d9c52f098ff7bf9b - pristine_git_object: dd02e42e4cc509dc90e6ae70493054021faa5f9c + last_write_checksum: sha1:da077c0bdfcef64a4a5aea91a17292f72fa2b088 + pristine_git_object: 833c68cd526fe34aab2b7e7c45f974f7f4b9e120 src/mistralai/azure/client/_hooks/__init__.py: id: 850c237217cb last_write_checksum: sha1:e3111289afd28ad557c21d9e2f918caabfb7037d @@ -354,76 +354,100 @@ trackedFiles: pristine_git_object: 3e4e39555d60adebe84e596c8323ee5b80676fc9 src/mistralai/azure/client/_version.py: id: a77160e60e5d - last_write_checksum: sha1:e26eb828e9a240042acc754f38dcf2e581e045aa - pristine_git_object: 4448d2a0fd803f43820378359c921d09eba6f43e + last_write_checksum: sha1:b1d1971d43e8f92bd55bb45653a228fd9de97af3 + pristine_git_object: 4f985cc69c492521664044337e5910f8e5a26b90 src/mistralai/azure/client/basesdk.py: id: 5a585a95ec21 - last_write_checksum: sha1:d7a4a959d7d3ca3cd22d8daf144c3b4d5c0d1210 - pristine_git_object: b0391ac078b4e2a5d9107ed014c1ca939a553c23 + last_write_checksum: sha1:0c2e686aa42d6aeeb103193aa058d6ddff7bcf74 + pristine_git_object: 0d4d9a440e6c7726b6bc7fc6525aa3dc009847eb src/mistralai/azure/client/chat.py: id: c18454e628d7 - last_write_checksum: sha1:cc1ff54b85ce494428ebf22ec01bd1199cd9e2b6 - pristine_git_object: 3348bf47eafb3fcfb2de0e7d512073e947b69554 + last_write_checksum: sha1:884e22b0e313662c67cec7101765d8d7ef0bc48a + pristine_git_object: 1051f9527851894988f7e1689923575cf72a0896 + src/mistralai/azure/client/errors/__init__.py: + id: f377703514d9 + last_write_checksum: sha1:36c516c11f8083c3380a72c1d0f0718a3345f24b + pristine_git_object: 79e2712c2e62121fb6dbaab15ca8487f0e16b07c + src/mistralai/azure/client/errors/httpvalidationerror.py: + id: c3ec0ad923e9 + last_write_checksum: sha1:f45b41c1ad980c5d481158209bf23fa795cc68bc + pristine_git_object: b4f2691e630a095ff09fbbce5e2ea3063592084f + src/mistralai/azure/client/errors/mistralazureerror.py: + id: fae868afae89 + last_write_checksum: 
sha1:25f4411c7411faad753d46118edf74828b1c9f7c + pristine_git_object: c5bf17528c7cf25bac8f8874f58692c601fcdd76 + src/mistralai/azure/client/errors/no_response_error.py: + id: b838df044e62 + last_write_checksum: sha1:7f326424a7d5ae1bcd5c89a0d6b3dbda9138942f + pristine_git_object: 1deab64bc43e1e65bf3c412d326a4032ce342366 + src/mistralai/azure/client/errors/responsevalidationerror.py: + id: 77ac5e93cdda + last_write_checksum: sha1:c1e045dbdda0199bc1d563819c0b38e877d0efef + pristine_git_object: 02397334d2b3bf2516808b69b2548564f650cbe0 + src/mistralai/azure/client/errors/sdkerror.py: + id: dfdd4b1d8928 + last_write_checksum: sha1:edc2baf6feb199e1b1ff1aad681622b44804299d + pristine_git_object: c4f3616cd2720a9b5d2a2c5b2d22a305629ebbe6 src/mistralai/azure/client/httpclient.py: id: 60c81037fbd0 last_write_checksum: sha1:5e55338d6ee9f01ab648cad4380201a8a3da7dd7 pristine_git_object: 89560b566073785535643e694c112bedbd3db13d src/mistralai/azure/client/models/__init__.py: id: "335011330e21" - last_write_checksum: sha1:9afe0f0fb324a2b3c60ec98ce78b1ff6f908db39 - pristine_git_object: 51db6a383ddbab2d946b00c41934359a7eb50448 + last_write_checksum: sha1:07054ca95df60a3f03d8ea37a361aa506f94b78b + pristine_git_object: 908dda32cebe894b37dccaaa9b84db174ac93c21 src/mistralai/azure/client/models/assistantmessage.py: id: 353ed9110f97 - last_write_checksum: sha1:e444c76e27b9b745b9238894bdf2b6a40bba6e6e - pristine_git_object: f5793f9455485c576293b44fb548be8bae9c7a65 + last_write_checksum: sha1:973979ac03f86f26ee9a540aaaa8f70a7011daca + pristine_git_object: e9ae6e82c3c758561c8c9663f27b2fd7e38d2911 src/mistralai/azure/client/models/chatcompletionchoice.py: id: 6942c7db5891 last_write_checksum: sha1:817bfda6120a98248322c308629e404081e01279 pristine_git_object: 67b5ba694217f4f3b95589d7f84af6a9bea9802d src/mistralai/azure/client/models/chatcompletionrequest.py: id: 0c711c870184 - last_write_checksum: sha1:fae2a92375aa3e58c258e4497acead859cd3b6dc - pristine_git_object: 
921790959880ddf9b9ffce15d881e01f8adefa86 + last_write_checksum: sha1:ffdd11a4945dd805c9a73328749c2f4d9b6f80e6 + pristine_git_object: edd0fdc74a1b81f458d6083e79dc393e488da36a src/mistralai/azure/client/models/chatcompletionresponse.py: id: bdfacf065e9e last_write_checksum: sha1:c72fb624e7475a551d37e0b291b64bcf772c402a pristine_git_object: d41f9c6fab670cf7c961f50b1302f9a88cf48162 src/mistralai/azure/client/models/chatcompletionstreamrequest.py: id: da00a7feb4ef - last_write_checksum: sha1:c8c84c818b3b22bfec1e7f5737bbb281088dd3ba - pristine_git_object: be21eed2ecbe8354eb9a4bfa48122b28dada4aaf + last_write_checksum: sha1:8bb36693fed73a50d59687ca8b30a2c443708610 + pristine_git_object: 2edfbed98462eab43f322b9c706721365e410bb9 src/mistralai/azure/client/models/completionchunk.py: id: 28d620f25510 - last_write_checksum: sha1:413545e0521539346bff6e77fdec0c9e383bde17 - pristine_git_object: b94284b2d9c29c25a2f8eaa02828e2a205f4407e + last_write_checksum: sha1:84d1c55ef7bdb438e7f536a604a070799d054281 + pristine_git_object: 0e64bbc8aa0293c9d763db56287f296909260c38 src/mistralai/azure/client/models/completionevent.py: id: a6f00a747933 last_write_checksum: sha1:3d04bfbdaf11c52af5613ed0fd70c8dbc59f6d49 pristine_git_object: c4b272871d9b3ea8443f469d29b0825706c25c00 src/mistralai/azure/client/models/completionresponsestreamchoice.py: id: 3ba5d7ba8a13 - last_write_checksum: sha1:f917300daf4febec7661f2c73bae675600ee0bdd - pristine_git_object: 2a4d053feb84cf2a9675d76ae08c83945b26644c + last_write_checksum: sha1:4de311509c71c8f582b2c767febea89f1acd341a + pristine_git_object: 20a271401ff98d69525947ab929078af83aab1f1 src/mistralai/azure/client/models/contentchunk.py: id: 1f65e4f8f731 - last_write_checksum: sha1:79efbc90c1ae36b74492666125fb3e5ecaa5c27a - pristine_git_object: 0f09f76703efd95fcd96377b8ec6870d58dbf829 + last_write_checksum: sha1:cf11e1f061d3c8af040ebbdba0b25d4177e1cea4 + pristine_git_object: 17efcc7d5825461576cf61257908688cffd23eb7 
src/mistralai/azure/client/models/deltamessage.py: id: b7dab1d158de - last_write_checksum: sha1:553fdff5a3aec6909417be3cb390d99421af1693 - pristine_git_object: 2c01feae56c44d256f1e579c15f08e167dcc6481 + last_write_checksum: sha1:190c2809d575244eda5efbb1e00a4ec5811aea29 + pristine_git_object: 567e772fc1b376efaec1a2dfd660bc74a916f8ee src/mistralai/azure/client/models/documenturlchunk.py: id: e56fec6e977f - last_write_checksum: sha1:a43cee08f935933bf715b2f1a82b4c746b591f35 - pristine_git_object: 345bafc2bfe3cc056d746cf8151cf53b68771414 + last_write_checksum: sha1:0313d94f343d46dac7cc3adc392feaf06fa2b2a4 + pristine_git_object: 2dea80056f6752bdaa5d00f391cb6f54371a9d2b src/mistralai/azure/client/models/filechunk.py: id: 150d9f180110 - last_write_checksum: sha1:df1e010006338f6dd37009f2547ab8f0b90b917a - pristine_git_object: 829f03d84c25dd859d514ffa26e570f235e4e75b + last_write_checksum: sha1:6d12d630a5bfd601836f9cb3d63b9eb2f15f880d + pristine_git_object: 6baa0cba81535e157c0f81ae2648362f7bd1adbd src/mistralai/azure/client/models/function.py: id: 6d1e2011a14b - last_write_checksum: sha1:62df160db82853d79907cccff4d0904f6bb9f142 - pristine_git_object: f4edce0fb8563f485d9a63a42439a9b2593a7f40 + last_write_checksum: sha1:b064eca9256966603581d41b5b2c08cd2448224d + pristine_git_object: 055d3657fd98da63b80deb8cd2054e95a0e66a2b src/mistralai/azure/client/models/functioncall.py: id: ced560a1bd57 last_write_checksum: sha1:490cb3a0305994de063e06fa4c77defa911271f3 @@ -432,150 +456,130 @@ trackedFiles: id: 6f09474ebc85 last_write_checksum: sha1:651ceed24416ce8192f70db03cc5cd0db685899f pristine_git_object: 839e0d557a902da6c819210962e38e1df9bda90f - src/mistralai/azure/client/models/httpvalidationerror.py: - id: ca155413681b - last_write_checksum: sha1:9dea33d9c74bbdf842ee9d157e4aaa05c36ae34a - pristine_git_object: 40bccddc4d0c0e761d70af713387561101e20b60 src/mistralai/azure/client/models/imagedetail.py: id: de211988043d last_write_checksum: sha1:812f2ec4fc0d8d13db643ed49192384d5a841aa4 
pristine_git_object: 2d074cee614e1c49b69ee4073c3aaaa7a5a2c9e2 src/mistralai/azure/client/models/imageurl.py: id: c8882341c798 - last_write_checksum: sha1:443ee3739b3801928b4f3d4256531078fc4045e8 - pristine_git_object: b3c705e3f261ebd59f40e46785577694d80f98bf + last_write_checksum: sha1:8c3c08cc5d33c66b12539270b7edbf157d936f86 + pristine_git_object: bcb4fe43d334752be501d694543250d7e632a9c7 src/mistralai/azure/client/models/imageurlchunk.py: id: b6f0abb574d7 - last_write_checksum: sha1:4651f12f779bc86874c8516f06e39b882e414c92 - pristine_git_object: ee6de50f2add830c19d0b8b030a7c7a2ab65cb11 + last_write_checksum: sha1:417618d9d2aba85386a100dfe818d13342830526 + pristine_git_object: 7213c49846a4107271d017dd695648d98c2efa94 src/mistralai/azure/client/models/jsonschema.py: id: bfd486f4bb18 - last_write_checksum: sha1:ffe7190393086a4301aaffa6854cb3d80b0db92f - pristine_git_object: 5aaa490af350ac1c436dafb3d3c73d56402cac11 - src/mistralai/azure/client/models/mistralazureerror.py: - id: 31ed29254e67 - last_write_checksum: sha1:25f4411c7411faad753d46118edf74828b1c9f7c - pristine_git_object: c5bf17528c7cf25bac8f8874f58692c601fcdd76 + last_write_checksum: sha1:ccb2b53bd2351ec5119d9a7914a1a42c2746a096 + pristine_git_object: 99f2fb8903562465687edfd300d8efd373b92247 src/mistralai/azure/client/models/mistralpromptmode.py: id: d0028b1e4129 last_write_checksum: sha1:46fe1ab8ac2d5867877368a59a4aa5be2fabadeb pristine_git_object: 26e7adbdc4a981c92d51b72542c966b0ba0fb8f8 - src/mistralai/azure/client/models/no_response_error.py: - id: a956d6cd06f0 - last_write_checksum: sha1:7f326424a7d5ae1bcd5c89a0d6b3dbda9138942f - pristine_git_object: 1deab64bc43e1e65bf3c412d326a4032ce342366 src/mistralai/azure/client/models/ocrimageobject.py: id: 9c9f987d94bb - last_write_checksum: sha1:b86f5187d1c425ddf27ed4815657a7c41d71855c - pristine_git_object: 38e9d3e48df5cee8cdd0cd1d7b6df62182814104 + last_write_checksum: sha1:423effee97a4120a26ba78c2abe7f6adeb5c733d + pristine_git_object: 
a23515b346a0f9517fec0b2381e1b0c04cb31816 src/mistralai/azure/client/models/ocrpagedimensions.py: id: 7669a25f32b3 last_write_checksum: sha1:60642db6bb61f0e96204fb78d3aa0bd80dd0a7e5 pristine_git_object: 12858da92de99aa6da9d6e148df3ba7ee37496c7 src/mistralai/azure/client/models/ocrpageobject.py: id: eea193b05126 - last_write_checksum: sha1:baada584537b75e2e184738424068e61afe263c7 - pristine_git_object: 5fb821c19fd3cca2c2e149bd058a7ca49d2d002b + last_write_checksum: sha1:b8370ac0611dc3eccf09dddf85d1c39d3a11224b + pristine_git_object: 434c8988f124f93180e6cefa15b3aee067937946 src/mistralai/azure/client/models/ocrrequest.py: id: 365a5b4776a2 - last_write_checksum: sha1:9d3a9bccd341219934470688d3818557231b9b62 - pristine_git_object: fece2713166fc943194b7b38ec9b82db295bba0a + last_write_checksum: sha1:e684da1b6db18cb9c5ce95b9cc58556e05a9ea9b + pristine_git_object: a2cd341593c9db3644076d39352abca6815efc56 src/mistralai/azure/client/models/ocrresponse.py: id: b8cde8c16a4c - last_write_checksum: sha1:e6f08c68f0388919ca7bcbc4f0cb134525053fcd - pristine_git_object: 787289fa995ba6cbf4b2ef3d3c41edb31f656674 + last_write_checksum: sha1:55e81631f6fe57aaf58178460e1c5fc69fa19377 + pristine_git_object: 3dc09fd770a064e69e84519bd0f0c9127ebd8176 src/mistralai/azure/client/models/ocrtableobject.py: id: c2cd51b8789e - last_write_checksum: sha1:11052d42f0d91916f038437923ea656bf882032c - pristine_git_object: 3e3c25830a3216f4ef325f5b1056a0c1a267b090 + last_write_checksum: sha1:86a8fd2241cf6a636e81e58484a90bdb7880085e + pristine_git_object: f1de5428a71f9d42cd9f9e764d0bbf88f3aad8cc src/mistralai/azure/client/models/ocrusageinfo.py: id: 5e9118cac468 - last_write_checksum: sha1:6b27c09b5ec447c6ede22aa75190a1e06353349c - pristine_git_object: e2ceba35eb3f6e148389a7fd466dea5c051480a4 + last_write_checksum: sha1:97887b58cfe6ebd9ebd5905c6c7485525d6dc788 + pristine_git_object: f63315d23a1659aee4333b45c4239861aa5220d7 src/mistralai/azure/client/models/prediction.py: id: bd6abfa93083 - 
last_write_checksum: sha1:87eb3c43fa31b245c13c4708602b300956aa9efb - pristine_git_object: 6b8d6480b9ba1cb6683bdc93c24fb762ccfba146 + last_write_checksum: sha1:07d06d5629af183f999e043744a67868ef779bcc + pristine_git_object: 1fa1d78248628ccdc102ce0631d344150addfd2d src/mistralai/azure/client/models/referencechunk.py: id: c9612f854670 - last_write_checksum: sha1:b96507bcc82939fa4057532ef7e6a440baabd973 - pristine_git_object: e0bcb06be4d4c8d947ee267a9728aeae3a2c52fe + last_write_checksum: sha1:e81e758e00db915e68f58ffa1e03b2c473f64477 + pristine_git_object: f7af9bf9a73e0d782e5e6c6a7866af6fbc3668d8 src/mistralai/azure/client/models/responseformat.py: id: c124e7c316aa - last_write_checksum: sha1:f8c9e581053d1d885196c210a219a3e7aa086610 - pristine_git_object: 39fb03a25efdbc0a92ea91c72038ddd86ee056be + last_write_checksum: sha1:d368a2d4320356b6daab1dd0c62c6c862e902ca0 + pristine_git_object: 20fd2b868506cff278d1d7dc719eddd56ea538b0 src/mistralai/azure/client/models/responseformats.py: id: fef416cefcd4 last_write_checksum: sha1:a212e85d286b5b49219f57d071a2232ff8b5263b pristine_git_object: cbf83ce7b54ff8634f741334831807bfb5c98991 - src/mistralai/azure/client/models/responsevalidationerror.py: - id: afdb9463b434 - last_write_checksum: sha1:26f01befeb347a63928012e7eb36c95a8a392145 - pristine_git_object: cbdffcbba45a988805cdd52d111e77b0ca777dbf - src/mistralai/azure/client/models/sdkerror.py: - id: 4601c7297af7 - last_write_checksum: sha1:b54041f9751e1f2a38dd02a6f8eadb3907fa3df0 - pristine_git_object: a1e9aacaa2fcc839dcb2638788dd7c94298adee7 src/mistralai/azure/client/models/security.py: id: 4a2e4760ec08 last_write_checksum: sha1:0cd2ae54cecd88cfd8d43e92c0d3da7efa48942c pristine_git_object: 9b83ba98336090bed89fbeda40b4a07b212a1106 src/mistralai/azure/client/models/systemmessage.py: id: 8fa0dee9e4e1 - last_write_checksum: sha1:2b52c44b92a098b559ec8b7a80449532169cd317 - pristine_git_object: 38c280c809148e190e329619858718d132da6bc0 + last_write_checksum: 
sha1:26167db704ece6ef1391d6f474e00f417bff4639 + pristine_git_object: d4bd004476ef653798295fa5df9de68b607f0132 src/mistralai/azure/client/models/systemmessagecontentchunks.py: id: 5918e770869d - last_write_checksum: sha1:55529f2f29ba3087fbf117dbbe64e1dda92b2958 - pristine_git_object: 225f38b712f5f3c7abfd526cc8c0386687814f36 + last_write_checksum: sha1:d1f96498cbb540b91425e70ffa33892ff4d1c8cd + pristine_git_object: 8de71c909eda2ed0166a6be8f8ee029956e5766b src/mistralai/azure/client/models/textchunk.py: id: 9c81c76a6325 - last_write_checksum: sha1:d1c9eaffeb80299f023351dc8d07eb53e49133f2 - pristine_git_object: e513c1434cc7a4766bb9ef039ad8eed2bf0c12ca + last_write_checksum: sha1:28b8f4e030d365e5bf2f2f2720a7919b29616564 + pristine_git_object: 9295148588a143278ff5f48f9142347e35cfdab2 src/mistralai/azure/client/models/thinkchunk.py: id: df6bbd55b3eb - last_write_checksum: sha1:ec9af4cb7faa6ba8ed033b37db1d1d5a1406ac3f - pristine_git_object: e769399fe6ba90ddb2503f8fadb4b6cebc7d6f85 + last_write_checksum: sha1:752a81be169fdd7a6afc293cf090b2cd4d2b22c9 + pristine_git_object: 4e881aad3b11d43aecaab922fe55bf7b4076c42f src/mistralai/azure/client/models/tool.py: id: 4075ef72c086 - last_write_checksum: sha1:0c041eaa008ee1851e05bf90e57602c0338f362f - pristine_git_object: 169305bc4c538e88b1e0cf1120aa10e424118880 + last_write_checksum: sha1:4bef6d64b6426fdeff5031557c3c0e37f5c33b9a + pristine_git_object: 87329bdb73526120a3f63d48299114485a7fe038 src/mistralai/azure/client/models/toolcall.py: id: c65e6f79e539 - last_write_checksum: sha1:dd2290e019322e9df73b119e054a1d738eb5f3ba - pristine_git_object: a589b1b38ef4caaba2753f8335228bc16cd68961 + last_write_checksum: sha1:a3b36214b4533b79868630348762206a0e5ca26e + pristine_git_object: ada1ea65136fa58dce55f2857d895ea916bcd41f src/mistralai/azure/client/models/toolchoice.py: id: c25062b5de34 - last_write_checksum: sha1:db82f8d3f811461226cffbeacf2699103a5e0689 - pristine_git_object: 1f623222084f12eaa63f2cea656dc7da10b12a3a + last_write_checksum: 
sha1:6212c9366eb3b4f4062c86c00d4502dd03bf5ce1 + pristine_git_object: ddb9e1417c880c44a7f0505bfde839570fa3cd4a src/mistralai/azure/client/models/toolchoiceenum.py: id: cc06ba3a8d21 last_write_checksum: sha1:3dbba9a58c5569aafe115f3f7713a52b01ad8620 pristine_git_object: 01f6f677b379f9e3c99db9d1ad248cb0033a2804 src/mistralai/azure/client/models/toolmessage.py: id: 84ac736fa955 - last_write_checksum: sha1:11841bba4b66179321a35ea1a4d4d3571fa997b7 - pristine_git_object: a73fd6bf8355043f1b40caf7e8b9ded90c1fda0f + last_write_checksum: sha1:e4ed14906985fe74fd76a9adb09125ebc1218a1f + pristine_git_object: 670210de0d05b52ee9dffbbb808a87e67c2d37a9 src/mistralai/azure/client/models/tooltypes.py: id: fa881b046d34 last_write_checksum: sha1:cd28ddc02fff9a5abbb59c82fe9e0dcbdb9b6d2a pristine_git_object: 1cce7446f2772b998208ea1c78c7969e3881d5d0 src/mistralai/azure/client/models/usageinfo.py: id: 3edc9c81b329 - last_write_checksum: sha1:0b2117285b13d846a25c6c59436c4628b9d84a03 - pristine_git_object: 19a6b09fb63a3732719c45f8dfca92cfc2c57353 + last_write_checksum: sha1:0ac2350e4efa1ed3ffd7d33ac91c3ef564d1d773 + pristine_git_object: 0f04c87c97ff3148106408a46618c848b86c4b37 src/mistralai/azure/client/models/usermessage.py: id: 3796508adc07 - last_write_checksum: sha1:f4baa9d8b8f99f715873cea83191baf055c3296a - pristine_git_object: 96439c64a979ac3edf8900d39154d706846a3a95 + last_write_checksum: sha1:8eb35fb07971d74cf2cb0858c037558f52df6aa9 + pristine_git_object: 549b01ca887651a95c5efc8aff3372d32dfdc277 src/mistralai/azure/client/models/validationerror.py: id: f2b84813e2ea last_write_checksum: sha1:f0f9706a5af2ac4f6b234e768fdd492bbdd8a18c pristine_git_object: 817ecf7a56470369ccacd0f5e0bb739656a5f92c src/mistralai/azure/client/ocr.py: id: 5817c10c9297 - last_write_checksum: sha1:24fec22877024154ea417e31ea443b4795c443ba - pristine_git_object: 098e764b6580e35ad0e81242ca601ce821656ee9 + last_write_checksum: sha1:7666ca9f4596cee080952b2f4096bd4176051680 + pristine_git_object: 
b9270f6a52406d8a9bf02d90c24ae540da6dfb9d src/mistralai/azure/client/py.typed: id: e88369f116d2 last_write_checksum: sha1:8efc425ffe830805ffcc0f3055871bdcdc542c60 @@ -594,8 +598,8 @@ trackedFiles: pristine_git_object: a9a640a1a7048736383f96c67c6290c86bf536ee src/mistralai/azure/client/utils/__init__.py: id: 26f1a707325b - last_write_checksum: sha1:887f56a717845fab7445cc368d2a17d850c3565a - pristine_git_object: 05f26ade57efb8c54a774fbcb939fb1a7dc655ce + last_write_checksum: sha1:3ad22a588864c93bd3a16605f669955b5f3b8053 + pristine_git_object: b488c2df1390b22be3050eee72832a91c76d5385 src/mistralai/azure/client/utils/annotations.py: id: bb1f6c189fdb last_write_checksum: sha1:a4824ad65f730303e4e1e3ec1febf87b4eb46dbc @@ -604,18 +608,22 @@ trackedFiles: id: 2b7db09ee0ab last_write_checksum: sha1:c721e4123000e7dc61ec52b28a739439d9e17341 pristine_git_object: a6c52cd61bbe2d459046c940ce5e8c469f2f0664 + src/mistralai/azure/client/utils/dynamic_imports.py: + id: 0ac779c122d9 + last_write_checksum: sha1:a1940c63feb8eddfd8026de53384baf5056d5dcc + pristine_git_object: 673edf82a97d0fea7295625d3e092ea369a36b79 src/mistralai/azure/client/utils/enums.py: id: ffbdb1917a68 last_write_checksum: sha1:bc8c3c1285ae09ba8a094ee5c3d9c7f41fa1284d pristine_git_object: 3324e1bc2668c54c4d5f5a1a845675319757a828 src/mistralai/azure/client/utils/eventstreaming.py: id: bdc37b70360c - last_write_checksum: sha1:bababae5d54b7efc360db701daa49e18a92c2f3b - pristine_git_object: 0969899bfc491e5e408d05643525f347ea95e4fc + last_write_checksum: sha1:ffa870a25a7e4e2015bfd7a467ccd3aa1de97f0e + pristine_git_object: f2052fc22d9fd6c663ba3dce019fe234ca37108b src/mistralai/azure/client/utils/forms.py: id: 51696122c557 - last_write_checksum: sha1:15fa7e9ab1611e062a9984cf06cb20969713d295 - pristine_git_object: f961e76beaf0a8b1fe0dda44754a74eebd3608e7 + last_write_checksum: sha1:0ca31459b99f761fcc6d0557a0a38daac4ad50f4 + pristine_git_object: 1e550bd5c2c35d977ddc10f49d77c23cb12c158d 
src/mistralai/azure/client/utils/headers.py: id: e42840c8cb13 last_write_checksum: sha1:7c6df233ee006332b566a8afa9ce9a245941d935 @@ -638,20 +646,24 @@ trackedFiles: pristine_git_object: 1de32b6d26f46590232f398fdba6ce0072f1659c src/mistralai/azure/client/utils/retries.py: id: d50ed6e400b2 - last_write_checksum: sha1:5b97ac4f59357d70c2529975d50364c88bcad607 - pristine_git_object: 88a91b10cd2076b4a2c6cff2ac6bfaa5e3c5ad13 + last_write_checksum: sha1:471372f5c5d1dd5583239c9cf3c75f1b636e5d87 + pristine_git_object: af07d4e941007af4213c5ec9047ef8a2fca04e5e src/mistralai/azure/client/utils/security.py: id: 1d35741ce5f1 - last_write_checksum: sha1:a17130ace2c0db6394f38dd941ad2b700cc755c8 - pristine_git_object: 295a3f40031dbb40073ad227fd4a355660f97ab2 + last_write_checksum: sha1:435dd8b180cefcd733e635b9fa45512da091d9c0 + pristine_git_object: 17996bd54b8624009802fbbdf30bcb4225b8dfed src/mistralai/azure/client/utils/serializers.py: id: a1f26d73c3ad last_write_checksum: sha1:ce1d8d7f500a9ccba0aeca5057cee9c271f4dfd7 pristine_git_object: 14321eb479de81d0d9580ec8291e0ff91bf29e57 + src/mistralai/azure/client/utils/unions.py: + id: 9abcc9913e3f + last_write_checksum: sha1:6e38049f323e0b5fb4bd0e88ab51ec447197ccb0 + pristine_git_object: a227f4e87be22fce682fcae5813b71835199ec5e src/mistralai/azure/client/utils/unmarshal_json_response.py: id: 947f4fc4db62 - last_write_checksum: sha1:99bd357d24d2236e3974630d9bd18bae22610cbc - pristine_git_object: 5317ac87097ccb35628202cf7fc5cb21e186855f + last_write_checksum: sha1:75931131ff498a66a48cfb32dd9d5d61f2c9b4d1 + pristine_git_object: fe0c9b8ecabf8f89e363a050837582df40d67fb4 src/mistralai/azure/client/utils/url.py: id: 4976c88d0e3b last_write_checksum: sha1:6479961baa90432ca25626f8e40a7bbc32e73b41 diff --git a/packages/azure/.speakeasy/gen.yaml b/packages/azure/.speakeasy/gen.yaml index 729cdfcf..0b7262e0 100644 --- a/packages/azure/.speakeasy/gen.yaml +++ b/packages/azure/.speakeasy/gen.yaml @@ -13,8 +13,9 @@ generation: 
requestResponseComponentNamesFeb2024: true securityFeb2025: true sharedErrorComponentsApr2025: true - methodSignaturesApr2024: true sharedNestedComponentsJan2026: true + nameOverrideFeb2026: true + methodSignaturesApr2024: true auth: oAuth2ClientCredentialsEnabled: true oAuth2PasswordEnabled: false @@ -22,31 +23,37 @@ generation: schemas: allOfMergeStrategy: shallowMerge requestBodyFieldName: "" + versioningStrategy: automatic persistentEdits: {} tests: generateTests: true generateNewTests: false skipResponseBodyAssertions: false python: - version: 2.0.0a4 + version: 2.0.0-a4.1 additionalDependencies: dev: pytest: ^8.2.2 pytest-asyncio: ^0.23.7 + main: {} allowedRedefinedBuiltins: - id - object + - input + - dir asyncMode: both authors: - Mistral baseErrorName: MistralAzureError clientServerStatusCodesAsErrors: true - constFieldCasing: upper + constFieldCasing: normal defaultErrorName: SDKError description: Python Client SDK for the Mistral AI API in Azure. enableCustomCodeRegions: false enumFormat: union fixFlags: + asyncPaginationSep2025: true + conflictResistantModelImportsFeb2026: true responseRequiredSep2024: true flatAdditionalProperties: true flattenGlobalSecurity: true @@ -58,17 +65,17 @@ python: option: openapi paths: callbacks: "" - errors: "" + errors: errors operations: "" shared: "" webhooks: "" inferUnionDiscriminators: true inputModelSuffix: input license: "" - maxMethodParams: 15 + maxMethodParams: 999 methodArguments: infer-optional-args moduleName: mistralai.azure.client - multipartArrayFormat: legacy + multipartArrayFormat: standard outputModelSuffix: output packageManager: uv packageName: mistralai-azure @@ -78,3 +85,4 @@ python: responseFormat: flat sseFlatResponse: false templateVersion: v2 + useAsyncHooks: false diff --git a/packages/azure/docs/models/httpvalidationerror.md b/packages/azure/docs/errors/httpvalidationerror.md similarity index 100% rename from packages/azure/docs/models/httpvalidationerror.md rename to 
packages/azure/docs/errors/httpvalidationerror.md diff --git a/packages/azure/docs/models/chatcompletionrequest.md b/packages/azure/docs/models/chatcompletionrequest.md index 3b0f7270..f8715cd0 100644 --- a/packages/azure/docs/models/chatcompletionrequest.md +++ b/packages/azure/docs/models/chatcompletionrequest.md @@ -14,7 +14,7 @@ | `random_seed` | *OptionalNullable[int]* | :heavy_minus_sign: | The seed to use for random sampling. If set, different calls will generate deterministic results. | | | `metadata` | Dict[str, *Any*] | :heavy_minus_sign: | N/A | | | `messages` | List[[models.ChatCompletionRequestMessage](../models/chatcompletionrequestmessage.md)] | :heavy_check_mark: | The prompt(s) to generate completions for, encoded as a list of dict with role and content. | [
{
"role": "user",
"content": "Who is the best French painter? Answer in one short sentence."
}
] | -| `response_format` | [Optional[models.ResponseFormat]](../models/responseformat.md) | :heavy_minus_sign: | Specify the format that the model must output. By default it will use `{ "type": "text" }`. Setting to `{ "type": "json_object" }` enables JSON mode, which guarantees the message the model generates is in JSON. When using JSON mode you MUST also instruct the model to produce JSON yourself with a system or a user message. Setting to `{ "type": "json_schema" }` enables JSON schema mode, which guarantees the message the model generates is in JSON and follows the schema you provide. | {
"type": "text"
} | +| `response_format` | [Optional[models.ResponseFormat]](../models/responseformat.md) | :heavy_minus_sign: | Specify the format that the model must output. By default it will use `{ "type": "text" }`. Setting to `{ "type": "json_object" }` enables JSON mode, which guarantees the message the model generates is in JSON. When using JSON mode you MUST also instruct the model to produce JSON yourself with a system or a user message. Setting to `{ "type": "json_schema" }` enables JSON schema mode, which guarantees the message the model generates is in JSON and follows the schema you provide. | **Example 1:** {
"type": "text"
}
**Example 2:** {
"type": "json_object"
}
**Example 3:** {
"type": "json_schema",
"json_schema": {
"schema": {
"properties": {
"name": {
"title": "Name",
"type": "string"
},
"authors": {
"items": {
"type": "string"
},
"title": "Authors",
"type": "array"
}
},
"required": [
"name",
"authors"
],
"title": "Book",
"type": "object",
"additionalProperties": false
},
"name": "book",
"strict": true
}
} | | `tools` | List[[models.Tool](../models/tool.md)] | :heavy_minus_sign: | A list of tools the model may call. Use this to provide a list of functions the model may generate JSON inputs for. | | | `tool_choice` | [Optional[models.ChatCompletionRequestToolChoice]](../models/chatcompletionrequesttoolchoice.md) | :heavy_minus_sign: | Controls which (if any) tool is called by the model. `none` means the model will not call any tool and instead generates a message. `auto` means the model can pick between generating a message or calling one or more tools. `any` or `required` means the model must call one or more tools. Specifying a particular tool via `{"type": "function", "function": {"name": "my_function"}}` forces the model to call that tool. | | | `presence_penalty` | *Optional[float]* | :heavy_minus_sign: | The `presence_penalty` determines how much the model penalizes the repetition of words or phrases. A higher presence penalty encourages the model to use a wider variety of words and phrases, making the output more diverse and creative. | | diff --git a/packages/azure/docs/models/chatcompletionstreamrequest.md b/packages/azure/docs/models/chatcompletionstreamrequest.md index f78156a6..cc82a8c7 100644 --- a/packages/azure/docs/models/chatcompletionstreamrequest.md +++ b/packages/azure/docs/models/chatcompletionstreamrequest.md @@ -14,7 +14,7 @@ | `random_seed` | *OptionalNullable[int]* | :heavy_minus_sign: | The seed to use for random sampling. If set, different calls will generate deterministic results. | | | `metadata` | Dict[str, *Any*] | :heavy_minus_sign: | N/A | | | `messages` | List[[models.ChatCompletionStreamRequestMessage](../models/chatcompletionstreamrequestmessage.md)] | :heavy_check_mark: | The prompt(s) to generate completions for, encoded as a list of dict with role and content. | [
{
"role": "user",
"content": "Who is the best French painter? Answer in one short sentence."
}
] | -| `response_format` | [Optional[models.ResponseFormat]](../models/responseformat.md) | :heavy_minus_sign: | Specify the format that the model must output. By default it will use `{ "type": "text" }`. Setting to `{ "type": "json_object" }` enables JSON mode, which guarantees the message the model generates is in JSON. When using JSON mode you MUST also instruct the model to produce JSON yourself with a system or a user message. Setting to `{ "type": "json_schema" }` enables JSON schema mode, which guarantees the message the model generates is in JSON and follows the schema you provide. | {
"type": "text"
} | +| `response_format` | [Optional[models.ResponseFormat]](../models/responseformat.md) | :heavy_minus_sign: | Specify the format that the model must output. By default it will use `{ "type": "text" }`. Setting to `{ "type": "json_object" }` enables JSON mode, which guarantees the message the model generates is in JSON. When using JSON mode you MUST also instruct the model to produce JSON yourself with a system or a user message. Setting to `{ "type": "json_schema" }` enables JSON schema mode, which guarantees the message the model generates is in JSON and follows the schema you provide. | **Example 1:** {
"type": "text"
}
**Example 2:** {
"type": "json_object"
}
**Example 3:** {
"type": "json_schema",
"json_schema": {
"schema": {
"properties": {
"name": {
"title": "Name",
"type": "string"
},
"authors": {
"items": {
"type": "string"
},
"title": "Authors",
"type": "array"
}
},
"required": [
"name",
"authors"
],
"title": "Book",
"type": "object",
"additionalProperties": false
},
"name": "book",
"strict": true
}
} | | `tools` | List[[models.Tool](../models/tool.md)] | :heavy_minus_sign: | A list of tools the model may call. Use this to provide a list of functions the model may generate JSON inputs for. | | | `tool_choice` | [Optional[models.ChatCompletionStreamRequestToolChoice]](../models/chatcompletionstreamrequesttoolchoice.md) | :heavy_minus_sign: | Controls which (if any) tool is called by the model. `none` means the model will not call any tool and instead generates a message. `auto` means the model can pick between generating a message or calling one or more tools. `any` or `required` means the model must call one or more tools. Specifying a particular tool via `{"type": "function", "function": {"name": "my_function"}}` forces the model to call that tool. | | | `presence_penalty` | *Optional[float]* | :heavy_minus_sign: | The `presence_penalty` determines how much the model penalizes the repetition of words or phrases. A higher presence penalty encourages the model to use a wider variety of words and phrases, making the output more diverse and creative. 
| | diff --git a/packages/azure/docs/models/ocrrequest.md b/packages/azure/docs/models/ocrrequest.md index 87929e53..2d26c19f 100644 --- a/packages/azure/docs/models/ocrrequest.md +++ b/packages/azure/docs/models/ocrrequest.md @@ -3,18 +3,18 @@ ## Fields -| Field | Type | Required | Description | Example | -| ---------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------- | -| `model` | *Nullable[str]* | :heavy_check_mark: | N/A | | -| `id` | *Optional[str]* | :heavy_minus_sign: | N/A | | -| `document` | [models.Document](../models/document.md) | :heavy_check_mark: | Document to run OCR on | | -| `pages` | List[*int*] | :heavy_minus_sign: | Specific pages user wants to process in various formats: single number, range, or list of both. 
Starts from 0 | | -| `include_image_base64` | *OptionalNullable[bool]* | :heavy_minus_sign: | Include image URLs in response | | -| `image_limit` | *OptionalNullable[int]* | :heavy_minus_sign: | Max images to extract | | -| `image_min_size` | *OptionalNullable[int]* | :heavy_minus_sign: | Minimum height and width of image to extract | | -| `bbox_annotation_format` | [OptionalNullable[models.ResponseFormat]](../models/responseformat.md) | :heavy_minus_sign: | Structured output class for extracting useful information from each extracted bounding box / image from document. Only json_schema is valid for this field | {
"type": "text"
} | -| `document_annotation_format` | [OptionalNullable[models.ResponseFormat]](../models/responseformat.md) | :heavy_minus_sign: | Structured output class for extracting useful information from the entire document. Only json_schema is valid for this field | {
"type": "text"
} | -| `document_annotation_prompt` | *OptionalNullable[str]* | :heavy_minus_sign: | Optional prompt to guide the model in extracting structured output from the entire document. A document_annotation_format must be provided. | | -| `table_format` | [OptionalNullable[models.TableFormat]](../models/tableformat.md) | :heavy_minus_sign: | N/A | | -| `extract_header` | *Optional[bool]* | :heavy_minus_sign: | N/A | | -| `extract_footer` | *Optional[bool]* | :heavy_minus_sign: | N/A | | \ No newline at end of file +| Field | Type | Required | Description | Example | +| ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | 
----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `model` | *Nullable[str]* | :heavy_check_mark: | N/A | | +| `id` | *Optional[str]* | :heavy_minus_sign: | N/A | | +| `document` | [models.Document](../models/document.md) | :heavy_check_mark: | Document to run OCR on | | +| `pages` | List[*int*] | :heavy_minus_sign: | Specific pages user wants to process in various formats: single number, range, or list of both. Starts from 0 | | +| `include_image_base64` | *OptionalNullable[bool]* | :heavy_minus_sign: | Include image URLs in response | | +| `image_limit` | *OptionalNullable[int]* | :heavy_minus_sign: | Max images to extract | | +| `image_min_size` | *OptionalNullable[int]* | :heavy_minus_sign: | Minimum height and width of image to extract | | +| `bbox_annotation_format` | [OptionalNullable[models.ResponseFormat]](../models/responseformat.md) | :heavy_minus_sign: | Structured output class for extracting useful information from each extracted bounding box / image from document. Only json_schema is valid for this field | **Example 1:** {
"type": "text"
}
**Example 2:** {
"type": "json_object"
}
**Example 3:** {
"type": "json_schema",
"json_schema": {
"schema": {
"properties": {
"name": {
"title": "Name",
"type": "string"
},
"authors": {
"items": {
"type": "string"
},
"title": "Authors",
"type": "array"
}
},
"required": [
"name",
"authors"
],
"title": "Book",
"type": "object",
"additionalProperties": false
},
"name": "book",
"strict": true
}
} | +| `document_annotation_format` | [OptionalNullable[models.ResponseFormat]](../models/responseformat.md) | :heavy_minus_sign: | Structured output class for extracting useful information from the entire document. Only json_schema is valid for this field | **Example 1:** {
"type": "text"
}
**Example 2:** {
"type": "json_object"
}
**Example 3:** {
"type": "json_schema",
"json_schema": {
"schema": {
"properties": {
"name": {
"title": "Name",
"type": "string"
},
"authors": {
"items": {
"type": "string"
},
"title": "Authors",
"type": "array"
}
},
"required": [
"name",
"authors"
],
"title": "Book",
"type": "object",
"additionalProperties": false
},
"name": "book",
"strict": true
}
} | +| `document_annotation_prompt` | *OptionalNullable[str]* | :heavy_minus_sign: | Optional prompt to guide the model in extracting structured output from the entire document. A document_annotation_format must be provided. | | +| `table_format` | [OptionalNullable[models.TableFormat]](../models/tableformat.md) | :heavy_minus_sign: | N/A | | +| `extract_header` | *Optional[bool]* | :heavy_minus_sign: | N/A | | +| `extract_footer` | *Optional[bool]* | :heavy_minus_sign: | N/A | | \ No newline at end of file diff --git a/packages/azure/pylintrc b/packages/azure/pylintrc index a8fcb932..0391ac11 100644 --- a/packages/azure/pylintrc +++ b/packages/azure/pylintrc @@ -89,7 +89,7 @@ persistent=yes # Minimum Python version to use for version dependent checks. Will default to # the version used to run pylint. -py-version=3.9 +py-version=3.10 # Discover python modules and packages in the file system subtree. recursive=no @@ -459,7 +459,8 @@ disable=raw-checker-failed, consider-using-with, wildcard-import, unused-wildcard-import, - too-many-return-statements + too-many-return-statements, + redefined-builtin # Enable the message, report, category or checker with the given id(s). You can # either give multiple identifier separated by comma (,) or put this option @@ -641,7 +642,7 @@ additional-builtins= allow-global-unused-variables=yes # List of names allowed to shadow builtins -allowed-redefined-builtins=id,object +allowed-redefined-builtins=id,object,input,dir # List of strings which can identify a callback function by name. A callback # name must start or end with one of those strings. 
diff --git a/packages/azure/src/mistralai/azure/client/__init__.py b/packages/azure/src/mistralai/azure/client/__init__.py index dd02e42e..833c68cd 100644 --- a/packages/azure/src/mistralai/azure/client/__init__.py +++ b/packages/azure/src/mistralai/azure/client/__init__.py @@ -9,7 +9,6 @@ ) from .sdk import * from .sdkconfiguration import * -from .models import * VERSION: str = __version__ diff --git a/packages/azure/src/mistralai/azure/client/_version.py b/packages/azure/src/mistralai/azure/client/_version.py index 4448d2a0..4f985cc6 100644 --- a/packages/azure/src/mistralai/azure/client/_version.py +++ b/packages/azure/src/mistralai/azure/client/_version.py @@ -3,10 +3,10 @@ import importlib.metadata __title__: str = "mistralai-azure" -__version__: str = "2.0.0a4" +__version__: str = "2.0.0-a4.1" __openapi_doc_version__: str = "1.0.0" -__gen_version__: str = "2.794.1" -__user_agent__: str = "speakeasy-sdk/python 2.0.0a4 2.794.1 1.0.0 mistralai-azure" +__gen_version__: str = "2.841.0" +__user_agent__: str = "speakeasy-sdk/python 2.0.0-a4.1 2.841.0 1.0.0 mistralai-azure" try: if __package__ is not None: diff --git a/packages/azure/src/mistralai/azure/client/basesdk.py b/packages/azure/src/mistralai/azure/client/basesdk.py index b0391ac0..0d4d9a44 100644 --- a/packages/azure/src/mistralai/azure/client/basesdk.py +++ b/packages/azure/src/mistralai/azure/client/basesdk.py @@ -2,7 +2,7 @@ from .sdkconfiguration import SDKConfiguration import httpx -from mistralai.azure.client import models, utils +from mistralai.azure.client import errors, utils from mistralai.azure.client._hooks import ( AfterErrorContext, AfterSuccessContext, @@ -12,6 +12,7 @@ RetryConfig, SerializedRequestBody, get_body_content, + run_sync_in_thread, ) from typing import Callable, List, Mapping, Optional, Tuple from urllib.parse import parse_qs, urlparse @@ -264,7 +265,7 @@ def do(): if http_res is None: logger.debug("Raising no response SDK error") - raise models.NoResponseError("No response 
received") + raise errors.NoResponseError("No response received") logger.debug( "Response:\nStatus Code: %s\nURL: %s\nHeaders: %s\nBody: %s", @@ -285,7 +286,7 @@ def do(): http_res = result else: logger.debug("Raising unexpected SDK error") - raise models.SDKError("Unexpected error occurred", http_res) + raise errors.SDKError("Unexpected error occurred", http_res) return http_res @@ -315,7 +316,10 @@ async def do_request_async( async def do(): http_res = None try: - req = hooks.before_request(BeforeRequestContext(hook_ctx), request) + req = await run_sync_in_thread( + hooks.before_request, BeforeRequestContext(hook_ctx), request + ) + logger.debug( "Request:\nMethod: %s\nURL: %s\nHeaders: %s\nBody: %s", req.method, @@ -329,14 +333,17 @@ async def do(): http_res = await client.send(req, stream=stream) except Exception as e: - _, e = hooks.after_error(AfterErrorContext(hook_ctx), None, e) + _, e = await run_sync_in_thread( + hooks.after_error, AfterErrorContext(hook_ctx), None, e + ) + if e is not None: logger.debug("Request Exception", exc_info=True) raise e if http_res is None: logger.debug("Raising no response SDK error") - raise models.NoResponseError("No response received") + raise errors.NoResponseError("No response received") logger.debug( "Response:\nStatus Code: %s\nURL: %s\nHeaders: %s\nBody: %s", @@ -347,9 +354,10 @@ async def do(): ) if utils.match_status_codes(error_status_codes, http_res.status_code): - result, err = hooks.after_error( - AfterErrorContext(hook_ctx), http_res, None + result, err = await run_sync_in_thread( + hooks.after_error, AfterErrorContext(hook_ctx), http_res, None ) + if err is not None: logger.debug("Request Exception", exc_info=True) raise err @@ -357,7 +365,7 @@ async def do(): http_res = result else: logger.debug("Raising unexpected SDK error") - raise models.SDKError("Unexpected error occurred", http_res) + raise errors.SDKError("Unexpected error occurred", http_res) return http_res @@ -369,6 +377,8 @@ async def do(): http_res 
= await do() if not utils.match_status_codes(error_status_codes, http_res.status_code): - http_res = hooks.after_success(AfterSuccessContext(hook_ctx), http_res) + http_res = await run_sync_in_thread( + hooks.after_success, AfterSuccessContext(hook_ctx), http_res + ) return http_res diff --git a/packages/azure/src/mistralai/azure/client/chat.py b/packages/azure/src/mistralai/azure/client/chat.py index 3348bf47..1051f952 100644 --- a/packages/azure/src/mistralai/azure/client/chat.py +++ b/packages/azure/src/mistralai/azure/client/chat.py @@ -1,7 +1,7 @@ """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" from .basesdk import BaseSDK -from mistralai.azure.client import models, utils +from mistralai.azure.client import errors, models, utils from mistralai.azure.client._hooks import HookContext from mistralai.azure.client.types import OptionalNullable, UNSET from mistralai.azure.client.utils import eventstreaming @@ -179,18 +179,18 @@ def stream( if utils.match_response(http_res, "422", "application/json"): http_res_text = utils.stream_to_text(http_res) response_data = unmarshal_json_response( - models.HTTPValidationErrorData, http_res, http_res_text + errors.HTTPValidationErrorData, http_res, http_res_text ) - raise models.HTTPValidationError(response_data, http_res, http_res_text) + raise errors.HTTPValidationError(response_data, http_res, http_res_text) if utils.match_response(http_res, "4XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("Unexpected response received", http_res, http_res_text) + 
raise errors.SDKError("Unexpected response received", http_res, http_res_text) async def stream_async( self, @@ -359,18 +359,18 @@ async def stream_async( if utils.match_response(http_res, "422", "application/json"): http_res_text = await utils.stream_to_text_async(http_res) response_data = unmarshal_json_response( - models.HTTPValidationErrorData, http_res, http_res_text + errors.HTTPValidationErrorData, http_res, http_res_text ) - raise models.HTTPValidationError(response_data, http_res, http_res_text) + raise errors.HTTPValidationError(response_data, http_res, http_res_text) if utils.match_response(http_res, "4XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("Unexpected response received", http_res, http_res_text) + raise errors.SDKError("Unexpected response received", http_res, http_res_text) def complete( self, @@ -530,17 +530,17 @@ def complete( return unmarshal_json_response(models.ChatCompletionResponse, http_res) if utils.match_response(http_res, "422", "application/json"): response_data = unmarshal_json_response( - models.HTTPValidationErrorData, http_res + errors.HTTPValidationErrorData, http_res ) - raise models.HTTPValidationError(response_data, http_res) + raise errors.HTTPValidationError(response_data, http_res) if utils.match_response(http_res, "4XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if 
utils.match_response(http_res, "5XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise models.SDKError("Unexpected response received", http_res) + raise errors.SDKError("Unexpected response received", http_res) async def complete_async( self, @@ -700,14 +700,14 @@ async def complete_async( return unmarshal_json_response(models.ChatCompletionResponse, http_res) if utils.match_response(http_res, "422", "application/json"): response_data = unmarshal_json_response( - models.HTTPValidationErrorData, http_res + errors.HTTPValidationErrorData, http_res ) - raise models.HTTPValidationError(response_data, http_res) + raise errors.HTTPValidationError(response_data, http_res) if utils.match_response(http_res, "4XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise models.SDKError("Unexpected response received", http_res) + raise errors.SDKError("Unexpected response received", http_res) diff --git a/packages/azure/src/mistralai/azure/client/errors/__init__.py b/packages/azure/src/mistralai/azure/client/errors/__init__.py new file mode 100644 index 00000000..79e2712c --- /dev/null +++ b/packages/azure/src/mistralai/azure/client/errors/__init__.py @@ -0,0 +1,39 @@ +"""Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" + +from .mistralazureerror import MistralAzureError +from typing import Any, TYPE_CHECKING + +from mistralai.azure.client.utils.dynamic_imports import lazy_getattr, lazy_dir + +if TYPE_CHECKING: + from .httpvalidationerror import HTTPValidationError, HTTPValidationErrorData + from .no_response_error import NoResponseError + from .responsevalidationerror import ResponseValidationError + from .sdkerror import SDKError + +__all__ = [ + "HTTPValidationError", + "HTTPValidationErrorData", + "MistralAzureError", + "NoResponseError", + "ResponseValidationError", + "SDKError", +] + +_dynamic_imports: dict[str, str] = { + "HTTPValidationError": ".httpvalidationerror", + "HTTPValidationErrorData": ".httpvalidationerror", + "NoResponseError": ".no_response_error", + "ResponseValidationError": ".responsevalidationerror", + "SDKError": ".sdkerror", +} + + +def __getattr__(attr_name: str) -> Any: + return lazy_getattr( + attr_name, package=__package__, dynamic_imports=_dynamic_imports + ) + + +def __dir__(): + return lazy_dir(dynamic_imports=_dynamic_imports) diff --git a/packages/azure/src/mistralai/azure/client/models/httpvalidationerror.py b/packages/azure/src/mistralai/azure/client/errors/httpvalidationerror.py similarity index 76% rename from packages/azure/src/mistralai/azure/client/models/httpvalidationerror.py rename to packages/azure/src/mistralai/azure/client/errors/httpvalidationerror.py index 40bccddc..b4f2691e 100644 --- a/packages/azure/src/mistralai/azure/client/models/httpvalidationerror.py +++ b/packages/azure/src/mistralai/azure/client/errors/httpvalidationerror.py @@ -1,16 +1,16 @@ """Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" from __future__ import annotations -from .validationerror import ValidationError from dataclasses import dataclass, field import httpx -from mistralai.azure.client.models import MistralAzureError +from mistralai.azure.client.errors import MistralAzureError +from mistralai.azure.client.models import validationerror as models_validationerror from mistralai.azure.client.types import BaseModel from typing import List, Optional class HTTPValidationErrorData(BaseModel): - detail: Optional[List[ValidationError]] = None + detail: Optional[List[models_validationerror.ValidationError]] = None @dataclass(unsafe_hash=True) diff --git a/packages/azure/src/mistralai/azure/client/models/mistralazureerror.py b/packages/azure/src/mistralai/azure/client/errors/mistralazureerror.py similarity index 100% rename from packages/azure/src/mistralai/azure/client/models/mistralazureerror.py rename to packages/azure/src/mistralai/azure/client/errors/mistralazureerror.py diff --git a/packages/azure/src/mistralai/azure/client/models/no_response_error.py b/packages/azure/src/mistralai/azure/client/errors/no_response_error.py similarity index 100% rename from packages/azure/src/mistralai/azure/client/models/no_response_error.py rename to packages/azure/src/mistralai/azure/client/errors/no_response_error.py diff --git a/packages/azure/src/mistralai/azure/client/models/responsevalidationerror.py b/packages/azure/src/mistralai/azure/client/errors/responsevalidationerror.py similarity index 92% rename from packages/azure/src/mistralai/azure/client/models/responsevalidationerror.py rename to packages/azure/src/mistralai/azure/client/errors/responsevalidationerror.py index cbdffcbb..02397334 100644 --- a/packages/azure/src/mistralai/azure/client/models/responsevalidationerror.py +++ b/packages/azure/src/mistralai/azure/client/errors/responsevalidationerror.py @@ -4,7 +4,7 @@ from typing import Optional from dataclasses import dataclass -from mistralai.azure.client.models import 
MistralAzureError +from mistralai.azure.client.errors import MistralAzureError @dataclass(unsafe_hash=True) diff --git a/packages/azure/src/mistralai/azure/client/models/sdkerror.py b/packages/azure/src/mistralai/azure/client/errors/sdkerror.py similarity index 95% rename from packages/azure/src/mistralai/azure/client/models/sdkerror.py rename to packages/azure/src/mistralai/azure/client/errors/sdkerror.py index a1e9aaca..c4f3616c 100644 --- a/packages/azure/src/mistralai/azure/client/models/sdkerror.py +++ b/packages/azure/src/mistralai/azure/client/errors/sdkerror.py @@ -4,7 +4,7 @@ from typing import Optional from dataclasses import dataclass -from mistralai.azure.client.models import MistralAzureError +from mistralai.azure.client.errors import MistralAzureError MAX_MESSAGE_LEN = 10_000 diff --git a/packages/azure/src/mistralai/azure/client/models/__init__.py b/packages/azure/src/mistralai/azure/client/models/__init__.py index 51db6a38..908dda32 100644 --- a/packages/azure/src/mistralai/azure/client/models/__init__.py +++ b/packages/azure/src/mistralai/azure/client/models/__init__.py @@ -1,10 +1,8 @@ """Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" -from .mistralazureerror import MistralAzureError -from typing import TYPE_CHECKING -from importlib import import_module -import builtins -import sys +from typing import Any, TYPE_CHECKING + +from mistralai.azure.client.utils.dynamic_imports import lazy_getattr, lazy_dir if TYPE_CHECKING: from .assistantmessage import ( @@ -49,7 +47,7 @@ CompletionResponseStreamChoiceFinishReason, CompletionResponseStreamChoiceTypedDict, ) - from .contentchunk import ContentChunk, ContentChunkTypedDict + from .contentchunk import ContentChunk, ContentChunkTypedDict, UnknownContentChunk from .deltamessage import ( DeltaMessage, DeltaMessageContent, @@ -66,7 +64,6 @@ FunctionCallTypedDict, ) from .functionname import FunctionName, FunctionNameTypedDict - from .httpvalidationerror import HTTPValidationError, HTTPValidationErrorData from .imagedetail import ImageDetail from .imageurl import ImageURL, ImageURLTypedDict from .imageurlchunk import ( @@ -77,7 +74,6 @@ ) from .jsonschema import JSONSchema, JSONSchemaTypedDict from .mistralpromptmode import MistralPromptMode - from .no_response_error import NoResponseError from .ocrimageobject import OCRImageObject, OCRImageObjectTypedDict from .ocrpagedimensions import OCRPageDimensions, OCRPageDimensionsTypedDict from .ocrpageobject import OCRPageObject, OCRPageObjectTypedDict @@ -95,8 +91,6 @@ from .referencechunk import ReferenceChunk, ReferenceChunkTypedDict from .responseformat import ResponseFormat, ResponseFormatTypedDict from .responseformats import ResponseFormats - from .responsevalidationerror import ResponseValidationError - from .sdkerror import SDKError from .security import Security, SecurityTypedDict from .systemmessage import ( SystemMessage, @@ -189,8 +183,6 @@ "FunctionName", "FunctionNameTypedDict", "FunctionTypedDict", - "HTTPValidationError", - "HTTPValidationErrorData", "ImageDetail", "ImageURL", "ImageURLChunk", @@ -202,9 +194,7 @@ "JSONSchemaTypedDict", "Loc", "LocTypedDict", - "MistralAzureError", 
"MistralPromptMode", - "NoResponseError", "OCRImageObject", "OCRImageObjectTypedDict", "OCRPageDimensions", @@ -226,8 +216,6 @@ "ResponseFormat", "ResponseFormatTypedDict", "ResponseFormats", - "ResponseValidationError", - "SDKError", "Security", "SecurityTypedDict", "SystemMessage", @@ -255,6 +243,7 @@ "ToolMessageTypedDict", "ToolTypedDict", "ToolTypes", + "UnknownContentChunk", "UsageInfo", "UsageInfoTypedDict", "UserMessage", @@ -300,6 +289,7 @@ "CompletionResponseStreamChoiceTypedDict": ".completionresponsestreamchoice", "ContentChunk": ".contentchunk", "ContentChunkTypedDict": ".contentchunk", + "UnknownContentChunk": ".contentchunk", "DeltaMessage": ".deltamessage", "DeltaMessageContent": ".deltamessage", "DeltaMessageContentTypedDict": ".deltamessage", @@ -316,8 +306,6 @@ "FunctionCallTypedDict": ".functioncall", "FunctionName": ".functionname", "FunctionNameTypedDict": ".functionname", - "HTTPValidationError": ".httpvalidationerror", - "HTTPValidationErrorData": ".httpvalidationerror", "ImageDetail": ".imagedetail", "ImageURL": ".imageurl", "ImageURLTypedDict": ".imageurl", @@ -328,7 +316,6 @@ "JSONSchema": ".jsonschema", "JSONSchemaTypedDict": ".jsonschema", "MistralPromptMode": ".mistralpromptmode", - "NoResponseError": ".no_response_error", "OCRImageObject": ".ocrimageobject", "OCRImageObjectTypedDict": ".ocrimageobject", "OCRPageDimensions": ".ocrpagedimensions", @@ -354,8 +341,6 @@ "ResponseFormat": ".responseformat", "ResponseFormatTypedDict": ".responseformat", "ResponseFormats": ".responseformats", - "ResponseValidationError": ".responsevalidationerror", - "SDKError": ".sdkerror", "Security": ".security", "SecurityTypedDict": ".security", "SystemMessage": ".systemmessage", @@ -395,39 +380,11 @@ } -def dynamic_import(modname, retries=3): - for attempt in range(retries): - try: - return import_module(modname, __package__) - except KeyError: - # Clear any half-initialized module and retry - sys.modules.pop(modname, None) - if attempt == retries - 1: - 
break - raise KeyError(f"Failed to import module '{modname}' after {retries} attempts") - - -def __getattr__(attr_name: str) -> object: - module_name = _dynamic_imports.get(attr_name) - if module_name is None: - raise AttributeError( - f"No {attr_name} found in _dynamic_imports for module name -> {__name__} " - ) - - try: - module = dynamic_import(module_name) - result = getattr(module, attr_name) - return result - except ImportError as e: - raise ImportError( - f"Failed to import {attr_name} from {module_name}: {e}" - ) from e - except AttributeError as e: - raise AttributeError( - f"Failed to get {attr_name} from {module_name}: {e}" - ) from e +def __getattr__(attr_name: str) -> Any: + return lazy_getattr( + attr_name, package=__package__, dynamic_imports=_dynamic_imports + ) def __dir__(): - lazy_attrs = builtins.list(_dynamic_imports.keys()) - return builtins.sorted(lazy_attrs) + return lazy_dir(dynamic_imports=_dynamic_imports) diff --git a/packages/azure/src/mistralai/azure/client/models/assistantmessage.py b/packages/azure/src/mistralai/azure/client/models/assistantmessage.py index f5793f94..e9ae6e82 100644 --- a/packages/azure/src/mistralai/azure/client/models/assistantmessage.py +++ b/packages/azure/src/mistralai/azure/client/models/assistantmessage.py @@ -37,7 +37,7 @@ class AssistantMessageTypedDict(TypedDict): class AssistantMessage(BaseModel): - ROLE: Annotated[ + role: Annotated[ Annotated[ Optional[Literal["assistant"]], AfterValidator(validate_const("assistant")) ], @@ -53,30 +53,31 @@ class AssistantMessage(BaseModel): @model_serializer(mode="wrap") def serialize_model(self, handler): - optional_fields = ["role", "content", "tool_calls", "prefix"] - nullable_fields = ["content", "tool_calls"] - null_default_fields = [] - + optional_fields = set(["role", "content", "tool_calls", "prefix"]) + nullable_fields = set(["content", "tool_calls"]) serialized = handler(self) - m = {} for n, f in type(self).model_fields.items(): k = f.alias or n val = 
serialized.get(k) - serialized.pop(k, None) + is_nullable_and_explicitly_set = ( + k in nullable_fields + and (self.__pydantic_fields_set__.intersection({n})) # pylint: disable=no-member + ) + + if val != UNSET_SENTINEL: + if ( + val is not None + or k not in optional_fields + or is_nullable_and_explicitly_set + ): + m[k] = val - optional_nullable = k in optional_fields and k in nullable_fields - is_set = ( - self.__pydantic_fields_set__.intersection({n}) - or k in null_default_fields - ) # pylint: disable=no-member + return m - if val is not None and val != UNSET_SENTINEL: - m[k] = val - elif val != UNSET_SENTINEL and ( - not k in optional_fields or (optional_nullable and is_set) - ): - m[k] = val - return m +try: + AssistantMessage.model_rebuild() +except NameError: + pass diff --git a/packages/azure/src/mistralai/azure/client/models/chatcompletionrequest.py b/packages/azure/src/mistralai/azure/client/models/chatcompletionrequest.py index 92179095..edd0fdc7 100644 --- a/packages/azure/src/mistralai/azure/client/models/chatcompletionrequest.py +++ b/packages/azure/src/mistralai/azure/client/models/chatcompletionrequest.py @@ -170,57 +170,56 @@ class ChatCompletionRequest(BaseModel): @model_serializer(mode="wrap") def serialize_model(self, handler): - optional_fields = [ - "model", - "temperature", - "top_p", - "max_tokens", - "stream", - "stop", - "random_seed", - "metadata", - "response_format", - "tools", - "tool_choice", - "presence_penalty", - "frequency_penalty", - "n", - "prediction", - "parallel_tool_calls", - "prompt_mode", - "safe_prompt", - ] - nullable_fields = [ - "temperature", - "max_tokens", - "random_seed", - "metadata", - "tools", - "n", - "prompt_mode", - ] - null_default_fields = [] - + optional_fields = set( + [ + "model", + "temperature", + "top_p", + "max_tokens", + "stream", + "stop", + "random_seed", + "metadata", + "response_format", + "tools", + "tool_choice", + "presence_penalty", + "frequency_penalty", + "n", + "prediction", + 
"parallel_tool_calls", + "prompt_mode", + "safe_prompt", + ] + ) + nullable_fields = set( + [ + "temperature", + "max_tokens", + "random_seed", + "metadata", + "tools", + "n", + "prompt_mode", + ] + ) serialized = handler(self) - m = {} for n, f in type(self).model_fields.items(): k = f.alias or n val = serialized.get(k) - serialized.pop(k, None) - - optional_nullable = k in optional_fields and k in nullable_fields - is_set = ( - self.__pydantic_fields_set__.intersection({n}) - or k in null_default_fields - ) # pylint: disable=no-member - - if val is not None and val != UNSET_SENTINEL: - m[k] = val - elif val != UNSET_SENTINEL and ( - not k in optional_fields or (optional_nullable and is_set) - ): - m[k] = val + is_nullable_and_explicitly_set = ( + k in nullable_fields + and (self.__pydantic_fields_set__.intersection({n})) # pylint: disable=no-member + ) + + if val != UNSET_SENTINEL: + if ( + val is not None + or k not in optional_fields + or is_nullable_and_explicitly_set + ): + m[k] = val return m diff --git a/packages/azure/src/mistralai/azure/client/models/chatcompletionstreamrequest.py b/packages/azure/src/mistralai/azure/client/models/chatcompletionstreamrequest.py index be21eed2..2edfbed9 100644 --- a/packages/azure/src/mistralai/azure/client/models/chatcompletionstreamrequest.py +++ b/packages/azure/src/mistralai/azure/client/models/chatcompletionstreamrequest.py @@ -168,57 +168,56 @@ class ChatCompletionStreamRequest(BaseModel): @model_serializer(mode="wrap") def serialize_model(self, handler): - optional_fields = [ - "model", - "temperature", - "top_p", - "max_tokens", - "stream", - "stop", - "random_seed", - "metadata", - "response_format", - "tools", - "tool_choice", - "presence_penalty", - "frequency_penalty", - "n", - "prediction", - "parallel_tool_calls", - "prompt_mode", - "safe_prompt", - ] - nullable_fields = [ - "temperature", - "max_tokens", - "random_seed", - "metadata", - "tools", - "n", - "prompt_mode", - ] - null_default_fields = [] - + 
optional_fields = set( + [ + "model", + "temperature", + "top_p", + "max_tokens", + "stream", + "stop", + "random_seed", + "metadata", + "response_format", + "tools", + "tool_choice", + "presence_penalty", + "frequency_penalty", + "n", + "prediction", + "parallel_tool_calls", + "prompt_mode", + "safe_prompt", + ] + ) + nullable_fields = set( + [ + "temperature", + "max_tokens", + "random_seed", + "metadata", + "tools", + "n", + "prompt_mode", + ] + ) serialized = handler(self) - m = {} for n, f in type(self).model_fields.items(): k = f.alias or n val = serialized.get(k) - serialized.pop(k, None) - - optional_nullable = k in optional_fields and k in nullable_fields - is_set = ( - self.__pydantic_fields_set__.intersection({n}) - or k in null_default_fields - ) # pylint: disable=no-member - - if val is not None and val != UNSET_SENTINEL: - m[k] = val - elif val != UNSET_SENTINEL and ( - not k in optional_fields or (optional_nullable and is_set) - ): - m[k] = val + is_nullable_and_explicitly_set = ( + k in nullable_fields + and (self.__pydantic_fields_set__.intersection({n})) # pylint: disable=no-member + ) + + if val != UNSET_SENTINEL: + if ( + val is not None + or k not in optional_fields + or is_nullable_and_explicitly_set + ): + m[k] = val return m diff --git a/packages/azure/src/mistralai/azure/client/models/completionchunk.py b/packages/azure/src/mistralai/azure/client/models/completionchunk.py index b94284b2..0e64bbc8 100644 --- a/packages/azure/src/mistralai/azure/client/models/completionchunk.py +++ b/packages/azure/src/mistralai/azure/client/models/completionchunk.py @@ -6,7 +6,8 @@ CompletionResponseStreamChoiceTypedDict, ) from .usageinfo import UsageInfo, UsageInfoTypedDict -from mistralai.azure.client.types import BaseModel +from mistralai.azure.client.types import BaseModel, UNSET_SENTINEL +from pydantic import model_serializer from typing import List, Optional from typing_extensions import NotRequired, TypedDict @@ -32,3 +33,19 @@ class 
CompletionChunk(BaseModel): created: Optional[int] = None usage: Optional[UsageInfo] = None + + @model_serializer(mode="wrap") + def serialize_model(self, handler): + optional_fields = set(["object", "created", "usage"]) + serialized = handler(self) + m = {} + + for n, f in type(self).model_fields.items(): + k = f.alias or n + val = serialized.get(k) + + if val != UNSET_SENTINEL: + if val is not None or k not in optional_fields: + m[k] = val + + return m diff --git a/packages/azure/src/mistralai/azure/client/models/completionresponsestreamchoice.py b/packages/azure/src/mistralai/azure/client/models/completionresponsestreamchoice.py index 2a4d053f..20a27140 100644 --- a/packages/azure/src/mistralai/azure/client/models/completionresponsestreamchoice.py +++ b/packages/azure/src/mistralai/azure/client/models/completionresponsestreamchoice.py @@ -39,30 +39,14 @@ class CompletionResponseStreamChoice(BaseModel): @model_serializer(mode="wrap") def serialize_model(self, handler): - optional_fields = [] - nullable_fields = ["finish_reason"] - null_default_fields = [] - serialized = handler(self) - m = {} for n, f in type(self).model_fields.items(): k = f.alias or n val = serialized.get(k) - serialized.pop(k, None) - optional_nullable = k in optional_fields and k in nullable_fields - is_set = ( - self.__pydantic_fields_set__.intersection({n}) - or k in null_default_fields - ) # pylint: disable=no-member - - if val is not None and val != UNSET_SENTINEL: - m[k] = val - elif val != UNSET_SENTINEL and ( - not k in optional_fields or (optional_nullable and is_set) - ): + if val != UNSET_SENTINEL: m[k] = val return m diff --git a/packages/azure/src/mistralai/azure/client/models/contentchunk.py b/packages/azure/src/mistralai/azure/client/models/contentchunk.py index 0f09f767..17efcc7d 100644 --- a/packages/azure/src/mistralai/azure/client/models/contentchunk.py +++ b/packages/azure/src/mistralai/azure/client/models/contentchunk.py @@ -4,9 +4,12 @@ from .imageurlchunk import 
ImageURLChunk, ImageURLChunkTypedDict from .referencechunk import ReferenceChunk, ReferenceChunkTypedDict from .textchunk import TextChunk, TextChunkTypedDict -from mistralai.azure.client.utils import get_discriminator -from pydantic import Discriminator, Tag -from typing import Union +from functools import partial +from mistralai.azure.client.types import BaseModel +from mistralai.azure.client.utils.unions import parse_open_union +from pydantic import ConfigDict +from pydantic.functional_validators import BeforeValidator +from typing import Any, Literal, Union from typing_extensions import Annotated, TypeAliasType @@ -16,11 +19,32 @@ ) +class UnknownContentChunk(BaseModel): + r"""A ContentChunk variant the SDK doesn't recognize. Preserves the raw payload.""" + + type: Literal["UNKNOWN"] = "UNKNOWN" + raw: Any + is_unknown: Literal[True] = True + + model_config = ConfigDict(frozen=True) + + +_CONTENT_CHUNK_VARIANTS: dict[str, Any] = { + "image_url": ImageURLChunk, + "text": TextChunk, + "reference": ReferenceChunk, +} + + ContentChunk = Annotated[ - Union[ - Annotated[ImageURLChunk, Tag("image_url")], - Annotated[TextChunk, Tag("text")], - Annotated[ReferenceChunk, Tag("reference")], - ], - Discriminator(lambda m: get_discriminator(m, "type", "type")), + Union[ImageURLChunk, TextChunk, ReferenceChunk, UnknownContentChunk], + BeforeValidator( + partial( + parse_open_union, + disc_key="type", + variants=_CONTENT_CHUNK_VARIANTS, + unknown_cls=UnknownContentChunk, + union_name="ContentChunk", + ) + ), ] diff --git a/packages/azure/src/mistralai/azure/client/models/deltamessage.py b/packages/azure/src/mistralai/azure/client/models/deltamessage.py index 2c01feae..567e772f 100644 --- a/packages/azure/src/mistralai/azure/client/models/deltamessage.py +++ b/packages/azure/src/mistralai/azure/client/models/deltamessage.py @@ -40,30 +40,25 @@ class DeltaMessage(BaseModel): @model_serializer(mode="wrap") def serialize_model(self, handler): - optional_fields = ["role", 
"content", "tool_calls"] - nullable_fields = ["role", "content", "tool_calls"] - null_default_fields = [] - + optional_fields = set(["role", "content", "tool_calls"]) + nullable_fields = set(["role", "content", "tool_calls"]) serialized = handler(self) - m = {} for n, f in type(self).model_fields.items(): k = f.alias or n val = serialized.get(k) - serialized.pop(k, None) - - optional_nullable = k in optional_fields and k in nullable_fields - is_set = ( - self.__pydantic_fields_set__.intersection({n}) - or k in null_default_fields - ) # pylint: disable=no-member - - if val is not None and val != UNSET_SENTINEL: - m[k] = val - elif val != UNSET_SENTINEL and ( - not k in optional_fields or (optional_nullable and is_set) - ): - m[k] = val + is_nullable_and_explicitly_set = ( + k in nullable_fields + and (self.__pydantic_fields_set__.intersection({n})) # pylint: disable=no-member + ) + + if val != UNSET_SENTINEL: + if ( + val is not None + or k not in optional_fields + or is_nullable_and_explicitly_set + ): + m[k] = val return m diff --git a/packages/azure/src/mistralai/azure/client/models/documenturlchunk.py b/packages/azure/src/mistralai/azure/client/models/documenturlchunk.py index 345bafc2..2dea8005 100644 --- a/packages/azure/src/mistralai/azure/client/models/documenturlchunk.py +++ b/packages/azure/src/mistralai/azure/client/models/documenturlchunk.py @@ -26,7 +26,7 @@ class DocumentURLChunkTypedDict(TypedDict): class DocumentURLChunk(BaseModel): document_url: str - TYPE: Annotated[ + type: Annotated[ Annotated[ Optional[Literal["document_url"]], AfterValidator(validate_const("document_url")), @@ -39,30 +39,31 @@ class DocumentURLChunk(BaseModel): @model_serializer(mode="wrap") def serialize_model(self, handler): - optional_fields = ["type", "document_name"] - nullable_fields = ["document_name"] - null_default_fields = [] - + optional_fields = set(["type", "document_name"]) + nullable_fields = set(["document_name"]) serialized = handler(self) - m = {} for n, f in 
type(self).model_fields.items(): k = f.alias or n val = serialized.get(k) - serialized.pop(k, None) - - optional_nullable = k in optional_fields and k in nullable_fields - is_set = ( - self.__pydantic_fields_set__.intersection({n}) - or k in null_default_fields - ) # pylint: disable=no-member + is_nullable_and_explicitly_set = ( + k in nullable_fields + and (self.__pydantic_fields_set__.intersection({n})) # pylint: disable=no-member + ) - if val is not None and val != UNSET_SENTINEL: - m[k] = val - elif val != UNSET_SENTINEL and ( - not k in optional_fields or (optional_nullable and is_set) - ): - m[k] = val + if val != UNSET_SENTINEL: + if ( + val is not None + or k not in optional_fields + or is_nullable_and_explicitly_set + ): + m[k] = val return m + + +try: + DocumentURLChunk.model_rebuild() +except NameError: + pass diff --git a/packages/azure/src/mistralai/azure/client/models/filechunk.py b/packages/azure/src/mistralai/azure/client/models/filechunk.py index 829f03d8..6baa0cba 100644 --- a/packages/azure/src/mistralai/azure/client/models/filechunk.py +++ b/packages/azure/src/mistralai/azure/client/models/filechunk.py @@ -1,9 +1,10 @@ """Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" from __future__ import annotations -from mistralai.azure.client.types import BaseModel +from mistralai.azure.client.types import BaseModel, UNSET_SENTINEL from mistralai.azure.client.utils import validate_const import pydantic +from pydantic import model_serializer from pydantic.functional_validators import AfterValidator from typing import Literal, Optional from typing_extensions import Annotated, TypedDict @@ -17,7 +18,29 @@ class FileChunkTypedDict(TypedDict): class FileChunk(BaseModel): file_id: str - TYPE: Annotated[ + type: Annotated[ Annotated[Optional[Literal["file"]], AfterValidator(validate_const("file"))], pydantic.Field(alias="type"), ] = "file" + + @model_serializer(mode="wrap") + def serialize_model(self, handler): + optional_fields = set(["type"]) + serialized = handler(self) + m = {} + + for n, f in type(self).model_fields.items(): + k = f.alias or n + val = serialized.get(k) + + if val != UNSET_SENTINEL: + if val is not None or k not in optional_fields: + m[k] = val + + return m + + +try: + FileChunk.model_rebuild() +except NameError: + pass diff --git a/packages/azure/src/mistralai/azure/client/models/function.py b/packages/azure/src/mistralai/azure/client/models/function.py index f4edce0f..055d3657 100644 --- a/packages/azure/src/mistralai/azure/client/models/function.py +++ b/packages/azure/src/mistralai/azure/client/models/function.py @@ -1,7 +1,8 @@ """Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" from __future__ import annotations -from mistralai.azure.client.types import BaseModel +from mistralai.azure.client.types import BaseModel, UNSET_SENTINEL +from pydantic import model_serializer from typing import Any, Dict, Optional from typing_extensions import NotRequired, TypedDict @@ -21,3 +22,19 @@ class Function(BaseModel): description: Optional[str] = None strict: Optional[bool] = None + + @model_serializer(mode="wrap") + def serialize_model(self, handler): + optional_fields = set(["description", "strict"]) + serialized = handler(self) + m = {} + + for n, f in type(self).model_fields.items(): + k = f.alias or n + val = serialized.get(k) + + if val != UNSET_SENTINEL: + if val is not None or k not in optional_fields: + m[k] = val + + return m diff --git a/packages/azure/src/mistralai/azure/client/models/imageurl.py b/packages/azure/src/mistralai/azure/client/models/imageurl.py index b3c705e3..bcb4fe43 100644 --- a/packages/azure/src/mistralai/azure/client/models/imageurl.py +++ b/packages/azure/src/mistralai/azure/client/models/imageurl.py @@ -25,30 +25,25 @@ class ImageURL(BaseModel): @model_serializer(mode="wrap") def serialize_model(self, handler): - optional_fields = ["detail"] - nullable_fields = ["detail"] - null_default_fields = [] - + optional_fields = set(["detail"]) + nullable_fields = set(["detail"]) serialized = handler(self) - m = {} for n, f in type(self).model_fields.items(): k = f.alias or n val = serialized.get(k) - serialized.pop(k, None) - - optional_nullable = k in optional_fields and k in nullable_fields - is_set = ( - self.__pydantic_fields_set__.intersection({n}) - or k in null_default_fields - ) # pylint: disable=no-member - - if val is not None and val != UNSET_SENTINEL: - m[k] = val - elif val != UNSET_SENTINEL and ( - not k in optional_fields or (optional_nullable and is_set) - ): - m[k] = val + is_nullable_and_explicitly_set = ( + k in nullable_fields + and (self.__pydantic_fields_set__.intersection({n})) # pylint: 
disable=no-member + ) + + if val != UNSET_SENTINEL: + if ( + val is not None + or k not in optional_fields + or is_nullable_and_explicitly_set + ): + m[k] = val return m diff --git a/packages/azure/src/mistralai/azure/client/models/imageurlchunk.py b/packages/azure/src/mistralai/azure/client/models/imageurlchunk.py index ee6de50f..7213c498 100644 --- a/packages/azure/src/mistralai/azure/client/models/imageurlchunk.py +++ b/packages/azure/src/mistralai/azure/client/models/imageurlchunk.py @@ -2,9 +2,10 @@ from __future__ import annotations from .imageurl import ImageURL, ImageURLTypedDict -from mistralai.azure.client.types import BaseModel +from mistralai.azure.client.types import BaseModel, UNSET_SENTINEL from mistralai.azure.client.utils import validate_const import pydantic +from pydantic import model_serializer from pydantic.functional_validators import AfterValidator from typing import Literal, Optional, Union from typing_extensions import Annotated, TypeAliasType, TypedDict @@ -30,9 +31,31 @@ class ImageURLChunk(BaseModel): image_url: ImageURLUnion - TYPE: Annotated[ + type: Annotated[ Annotated[ Optional[Literal["image_url"]], AfterValidator(validate_const("image_url")) ], pydantic.Field(alias="type"), ] = "image_url" + + @model_serializer(mode="wrap") + def serialize_model(self, handler): + optional_fields = set(["type"]) + serialized = handler(self) + m = {} + + for n, f in type(self).model_fields.items(): + k = f.alias or n + val = serialized.get(k) + + if val != UNSET_SENTINEL: + if val is not None or k not in optional_fields: + m[k] = val + + return m + + +try: + ImageURLChunk.model_rebuild() +except NameError: + pass diff --git a/packages/azure/src/mistralai/azure/client/models/jsonschema.py b/packages/azure/src/mistralai/azure/client/models/jsonschema.py index 5aaa490a..99f2fb89 100644 --- a/packages/azure/src/mistralai/azure/client/models/jsonschema.py +++ b/packages/azure/src/mistralai/azure/client/models/jsonschema.py @@ -32,30 +32,31 @@ class 
JSONSchema(BaseModel): @model_serializer(mode="wrap") def serialize_model(self, handler): - optional_fields = ["description", "strict"] - nullable_fields = ["description"] - null_default_fields = [] - + optional_fields = set(["description", "strict"]) + nullable_fields = set(["description"]) serialized = handler(self) - m = {} for n, f in type(self).model_fields.items(): k = f.alias or n val = serialized.get(k) - serialized.pop(k, None) + is_nullable_and_explicitly_set = ( + k in nullable_fields + and (self.__pydantic_fields_set__.intersection({n})) # pylint: disable=no-member + ) + + if val != UNSET_SENTINEL: + if ( + val is not None + or k not in optional_fields + or is_nullable_and_explicitly_set + ): + m[k] = val - optional_nullable = k in optional_fields and k in nullable_fields - is_set = ( - self.__pydantic_fields_set__.intersection({n}) - or k in null_default_fields - ) # pylint: disable=no-member + return m - if val is not None and val != UNSET_SENTINEL: - m[k] = val - elif val != UNSET_SENTINEL and ( - not k in optional_fields or (optional_nullable and is_set) - ): - m[k] = val - return m +try: + JSONSchema.model_rebuild() +except NameError: + pass diff --git a/packages/azure/src/mistralai/azure/client/models/ocrimageobject.py b/packages/azure/src/mistralai/azure/client/models/ocrimageobject.py index 38e9d3e4..a23515b3 100644 --- a/packages/azure/src/mistralai/azure/client/models/ocrimageobject.py +++ b/packages/azure/src/mistralai/azure/client/models/ocrimageobject.py @@ -53,37 +53,34 @@ class OCRImageObject(BaseModel): @model_serializer(mode="wrap") def serialize_model(self, handler): - optional_fields = ["image_base64", "image_annotation"] - nullable_fields = [ - "top_left_x", - "top_left_y", - "bottom_right_x", - "bottom_right_y", - "image_base64", - "image_annotation", - ] - null_default_fields = [] - + optional_fields = set(["image_base64", "image_annotation"]) + nullable_fields = set( + [ + "top_left_x", + "top_left_y", + "bottom_right_x", + 
"bottom_right_y", + "image_base64", + "image_annotation", + ] + ) serialized = handler(self) - m = {} for n, f in type(self).model_fields.items(): k = f.alias or n val = serialized.get(k) - serialized.pop(k, None) - - optional_nullable = k in optional_fields and k in nullable_fields - is_set = ( - self.__pydantic_fields_set__.intersection({n}) - or k in null_default_fields - ) # pylint: disable=no-member - - if val is not None and val != UNSET_SENTINEL: - m[k] = val - elif val != UNSET_SENTINEL and ( - not k in optional_fields or (optional_nullable and is_set) - ): - m[k] = val + is_nullable_and_explicitly_set = ( + k in nullable_fields + and (self.__pydantic_fields_set__.intersection({n})) # pylint: disable=no-member + ) + + if val != UNSET_SENTINEL: + if ( + val is not None + or k not in optional_fields + or is_nullable_and_explicitly_set + ): + m[k] = val return m diff --git a/packages/azure/src/mistralai/azure/client/models/ocrpageobject.py b/packages/azure/src/mistralai/azure/client/models/ocrpageobject.py index 5fb821c1..434c8988 100644 --- a/packages/azure/src/mistralai/azure/client/models/ocrpageobject.py +++ b/packages/azure/src/mistralai/azure/client/models/ocrpageobject.py @@ -62,30 +62,25 @@ class OCRPageObject(BaseModel): @model_serializer(mode="wrap") def serialize_model(self, handler): - optional_fields = ["tables", "hyperlinks", "header", "footer"] - nullable_fields = ["header", "footer", "dimensions"] - null_default_fields = [] - + optional_fields = set(["tables", "hyperlinks", "header", "footer"]) + nullable_fields = set(["header", "footer", "dimensions"]) serialized = handler(self) - m = {} for n, f in type(self).model_fields.items(): k = f.alias or n val = serialized.get(k) - serialized.pop(k, None) - - optional_nullable = k in optional_fields and k in nullable_fields - is_set = ( - self.__pydantic_fields_set__.intersection({n}) - or k in null_default_fields - ) # pylint: disable=no-member - - if val is not None and val != UNSET_SENTINEL: - m[k] 
= val - elif val != UNSET_SENTINEL and ( - not k in optional_fields or (optional_nullable and is_set) - ): - m[k] = val + is_nullable_and_explicitly_set = ( + k in nullable_fields + and (self.__pydantic_fields_set__.intersection({n})) # pylint: disable=no-member + ) + + if val != UNSET_SENTINEL: + if ( + val is not None + or k not in optional_fields + or is_nullable_and_explicitly_set + ): + m[k] = val return m diff --git a/packages/azure/src/mistralai/azure/client/models/ocrrequest.py b/packages/azure/src/mistralai/azure/client/models/ocrrequest.py index fece2713..a2cd3415 100644 --- a/packages/azure/src/mistralai/azure/client/models/ocrrequest.py +++ b/packages/azure/src/mistralai/azure/client/models/ocrrequest.py @@ -95,52 +95,51 @@ class OCRRequest(BaseModel): @model_serializer(mode="wrap") def serialize_model(self, handler): - optional_fields = [ - "id", - "pages", - "include_image_base64", - "image_limit", - "image_min_size", - "bbox_annotation_format", - "document_annotation_format", - "document_annotation_prompt", - "table_format", - "extract_header", - "extract_footer", - ] - nullable_fields = [ - "model", - "pages", - "include_image_base64", - "image_limit", - "image_min_size", - "bbox_annotation_format", - "document_annotation_format", - "document_annotation_prompt", - "table_format", - ] - null_default_fields = [] - + optional_fields = set( + [ + "id", + "pages", + "include_image_base64", + "image_limit", + "image_min_size", + "bbox_annotation_format", + "document_annotation_format", + "document_annotation_prompt", + "table_format", + "extract_header", + "extract_footer", + ] + ) + nullable_fields = set( + [ + "model", + "pages", + "include_image_base64", + "image_limit", + "image_min_size", + "bbox_annotation_format", + "document_annotation_format", + "document_annotation_prompt", + "table_format", + ] + ) serialized = handler(self) - m = {} for n, f in type(self).model_fields.items(): k = f.alias or n val = serialized.get(k) - serialized.pop(k, None) 
- - optional_nullable = k in optional_fields and k in nullable_fields - is_set = ( - self.__pydantic_fields_set__.intersection({n}) - or k in null_default_fields - ) # pylint: disable=no-member - - if val is not None and val != UNSET_SENTINEL: - m[k] = val - elif val != UNSET_SENTINEL and ( - not k in optional_fields or (optional_nullable and is_set) - ): - m[k] = val + is_nullable_and_explicitly_set = ( + k in nullable_fields + and (self.__pydantic_fields_set__.intersection({n})) # pylint: disable=no-member + ) + + if val != UNSET_SENTINEL: + if ( + val is not None + or k not in optional_fields + or is_nullable_and_explicitly_set + ): + m[k] = val return m diff --git a/packages/azure/src/mistralai/azure/client/models/ocrresponse.py b/packages/azure/src/mistralai/azure/client/models/ocrresponse.py index 787289fa..3dc09fd7 100644 --- a/packages/azure/src/mistralai/azure/client/models/ocrresponse.py +++ b/packages/azure/src/mistralai/azure/client/models/ocrresponse.py @@ -39,30 +39,25 @@ class OCRResponse(BaseModel): @model_serializer(mode="wrap") def serialize_model(self, handler): - optional_fields = ["document_annotation"] - nullable_fields = ["document_annotation"] - null_default_fields = [] - + optional_fields = set(["document_annotation"]) + nullable_fields = set(["document_annotation"]) serialized = handler(self) - m = {} for n, f in type(self).model_fields.items(): k = f.alias or n val = serialized.get(k) - serialized.pop(k, None) - - optional_nullable = k in optional_fields and k in nullable_fields - is_set = ( - self.__pydantic_fields_set__.intersection({n}) - or k in null_default_fields - ) # pylint: disable=no-member - - if val is not None and val != UNSET_SENTINEL: - m[k] = val - elif val != UNSET_SENTINEL and ( - not k in optional_fields or (optional_nullable and is_set) - ): - m[k] = val + is_nullable_and_explicitly_set = ( + k in nullable_fields + and (self.__pydantic_fields_set__.intersection({n})) # pylint: disable=no-member + ) + + if val != 
UNSET_SENTINEL: + if ( + val is not None + or k not in optional_fields + or is_nullable_and_explicitly_set + ): + m[k] = val return m diff --git a/packages/azure/src/mistralai/azure/client/models/ocrtableobject.py b/packages/azure/src/mistralai/azure/client/models/ocrtableobject.py index 3e3c2583..f1de5428 100644 --- a/packages/azure/src/mistralai/azure/client/models/ocrtableobject.py +++ b/packages/azure/src/mistralai/azure/client/models/ocrtableobject.py @@ -35,3 +35,9 @@ class OCRTableObject(BaseModel): format_: Annotated[Format, pydantic.Field(alias="format")] r"""Format of the table""" + + +try: + OCRTableObject.model_rebuild() +except NameError: + pass diff --git a/packages/azure/src/mistralai/azure/client/models/ocrusageinfo.py b/packages/azure/src/mistralai/azure/client/models/ocrusageinfo.py index e2ceba35..f63315d2 100644 --- a/packages/azure/src/mistralai/azure/client/models/ocrusageinfo.py +++ b/packages/azure/src/mistralai/azure/client/models/ocrusageinfo.py @@ -28,30 +28,25 @@ class OCRUsageInfo(BaseModel): @model_serializer(mode="wrap") def serialize_model(self, handler): - optional_fields = ["doc_size_bytes"] - nullable_fields = ["doc_size_bytes"] - null_default_fields = [] - + optional_fields = set(["doc_size_bytes"]) + nullable_fields = set(["doc_size_bytes"]) serialized = handler(self) - m = {} for n, f in type(self).model_fields.items(): k = f.alias or n val = serialized.get(k) - serialized.pop(k, None) - - optional_nullable = k in optional_fields and k in nullable_fields - is_set = ( - self.__pydantic_fields_set__.intersection({n}) - or k in null_default_fields - ) # pylint: disable=no-member - - if val is not None and val != UNSET_SENTINEL: - m[k] = val - elif val != UNSET_SENTINEL and ( - not k in optional_fields or (optional_nullable and is_set) - ): - m[k] = val + is_nullable_and_explicitly_set = ( + k in nullable_fields + and (self.__pydantic_fields_set__.intersection({n})) # pylint: disable=no-member + ) + + if val != UNSET_SENTINEL: + if 
( + val is not None + or k not in optional_fields + or is_nullable_and_explicitly_set + ): + m[k] = val return m diff --git a/packages/azure/src/mistralai/azure/client/models/prediction.py b/packages/azure/src/mistralai/azure/client/models/prediction.py index 6b8d6480..1fa1d782 100644 --- a/packages/azure/src/mistralai/azure/client/models/prediction.py +++ b/packages/azure/src/mistralai/azure/client/models/prediction.py @@ -1,9 +1,10 @@ """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" from __future__ import annotations -from mistralai.azure.client.types import BaseModel +from mistralai.azure.client.types import BaseModel, UNSET_SENTINEL from mistralai.azure.client.utils import validate_const import pydantic +from pydantic import model_serializer from pydantic.functional_validators import AfterValidator from typing import Literal, Optional from typing_extensions import Annotated, NotRequired, TypedDict @@ -19,7 +20,7 @@ class PredictionTypedDict(TypedDict): class Prediction(BaseModel): r"""Enable users to specify an expected completion, optimizing response times by leveraging known or predictable content.""" - TYPE: Annotated[ + type: Annotated[ Annotated[ Optional[Literal["content"]], AfterValidator(validate_const("content")) ], @@ -27,3 +28,25 @@ class Prediction(BaseModel): ] = "content" content: Optional[str] = "" + + @model_serializer(mode="wrap") + def serialize_model(self, handler): + optional_fields = set(["type", "content"]) + serialized = handler(self) + m = {} + + for n, f in type(self).model_fields.items(): + k = f.alias or n + val = serialized.get(k) + + if val != UNSET_SENTINEL: + if val is not None or k not in optional_fields: + m[k] = val + + return m + + +try: + Prediction.model_rebuild() +except NameError: + pass diff --git a/packages/azure/src/mistralai/azure/client/models/referencechunk.py b/packages/azure/src/mistralai/azure/client/models/referencechunk.py index e0bcb06b..f7af9bf9 100644 --- 
a/packages/azure/src/mistralai/azure/client/models/referencechunk.py +++ b/packages/azure/src/mistralai/azure/client/models/referencechunk.py @@ -1,9 +1,10 @@ """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" from __future__ import annotations -from mistralai.azure.client.types import BaseModel +from mistralai.azure.client.types import BaseModel, UNSET_SENTINEL from mistralai.azure.client.utils import validate_const import pydantic +from pydantic import model_serializer from pydantic.functional_validators import AfterValidator from typing import List, Literal, Optional from typing_extensions import Annotated, TypedDict @@ -17,9 +18,31 @@ class ReferenceChunkTypedDict(TypedDict): class ReferenceChunk(BaseModel): reference_ids: List[int] - TYPE: Annotated[ + type: Annotated[ Annotated[ Optional[Literal["reference"]], AfterValidator(validate_const("reference")) ], pydantic.Field(alias="type"), ] = "reference" + + @model_serializer(mode="wrap") + def serialize_model(self, handler): + optional_fields = set(["type"]) + serialized = handler(self) + m = {} + + for n, f in type(self).model_fields.items(): + k = f.alias or n + val = serialized.get(k) + + if val != UNSET_SENTINEL: + if val is not None or k not in optional_fields: + m[k] = val + + return m + + +try: + ReferenceChunk.model_rebuild() +except NameError: + pass diff --git a/packages/azure/src/mistralai/azure/client/models/responseformat.py b/packages/azure/src/mistralai/azure/client/models/responseformat.py index 39fb03a2..20fd2b86 100644 --- a/packages/azure/src/mistralai/azure/client/models/responseformat.py +++ b/packages/azure/src/mistralai/azure/client/models/responseformat.py @@ -31,30 +31,25 @@ class ResponseFormat(BaseModel): @model_serializer(mode="wrap") def serialize_model(self, handler): - optional_fields = ["type", "json_schema"] - nullable_fields = ["json_schema"] - null_default_fields = [] - + optional_fields = set(["type", "json_schema"]) + nullable_fields = 
set(["json_schema"]) serialized = handler(self) - m = {} for n, f in type(self).model_fields.items(): k = f.alias or n val = serialized.get(k) - serialized.pop(k, None) - - optional_nullable = k in optional_fields and k in nullable_fields - is_set = ( - self.__pydantic_fields_set__.intersection({n}) - or k in null_default_fields - ) # pylint: disable=no-member - - if val is not None and val != UNSET_SENTINEL: - m[k] = val - elif val != UNSET_SENTINEL and ( - not k in optional_fields or (optional_nullable and is_set) - ): - m[k] = val + is_nullable_and_explicitly_set = ( + k in nullable_fields + and (self.__pydantic_fields_set__.intersection({n})) # pylint: disable=no-member + ) + + if val != UNSET_SENTINEL: + if ( + val is not None + or k not in optional_fields + or is_nullable_and_explicitly_set + ): + m[k] = val return m diff --git a/packages/azure/src/mistralai/azure/client/models/systemmessage.py b/packages/azure/src/mistralai/azure/client/models/systemmessage.py index 38c280c8..d4bd0044 100644 --- a/packages/azure/src/mistralai/azure/client/models/systemmessage.py +++ b/packages/azure/src/mistralai/azure/client/models/systemmessage.py @@ -32,7 +32,13 @@ class SystemMessageTypedDict(TypedDict): class SystemMessage(BaseModel): content: SystemMessageContent - ROLE: Annotated[ + role: Annotated[ Annotated[Literal["system"], AfterValidator(validate_const("system"))], pydantic.Field(alias="role"), ] = "system" + + +try: + SystemMessage.model_rebuild() +except NameError: + pass diff --git a/packages/azure/src/mistralai/azure/client/models/systemmessagecontentchunks.py b/packages/azure/src/mistralai/azure/client/models/systemmessagecontentchunks.py index 225f38b7..8de71c90 100644 --- a/packages/azure/src/mistralai/azure/client/models/systemmessagecontentchunks.py +++ b/packages/azure/src/mistralai/azure/client/models/systemmessagecontentchunks.py @@ -15,5 +15,5 @@ SystemMessageContentChunks = Annotated[ - Union[TextChunk, ThinkChunk], Field(discriminator="TYPE") + 
Union[TextChunk, ThinkChunk], Field(discriminator="type") ] diff --git a/packages/azure/src/mistralai/azure/client/models/textchunk.py b/packages/azure/src/mistralai/azure/client/models/textchunk.py index e513c143..92951485 100644 --- a/packages/azure/src/mistralai/azure/client/models/textchunk.py +++ b/packages/azure/src/mistralai/azure/client/models/textchunk.py @@ -17,7 +17,13 @@ class TextChunkTypedDict(TypedDict): class TextChunk(BaseModel): text: str - TYPE: Annotated[ + type: Annotated[ Annotated[Literal["text"], AfterValidator(validate_const("text"))], pydantic.Field(alias="type"), ] = "text" + + +try: + TextChunk.model_rebuild() +except NameError: + pass diff --git a/packages/azure/src/mistralai/azure/client/models/thinkchunk.py b/packages/azure/src/mistralai/azure/client/models/thinkchunk.py index e769399f..4e881aad 100644 --- a/packages/azure/src/mistralai/azure/client/models/thinkchunk.py +++ b/packages/azure/src/mistralai/azure/client/models/thinkchunk.py @@ -3,9 +3,10 @@ from __future__ import annotations from .referencechunk import ReferenceChunk, ReferenceChunkTypedDict from .textchunk import TextChunk, TextChunkTypedDict -from mistralai.azure.client.types import BaseModel +from mistralai.azure.client.types import BaseModel, UNSET_SENTINEL from mistralai.azure.client.utils import validate_const import pydantic +from pydantic import model_serializer from pydantic.functional_validators import AfterValidator from typing import List, Literal, Optional, Union from typing_extensions import Annotated, NotRequired, TypeAliasType, TypedDict @@ -29,10 +30,32 @@ class ThinkChunkTypedDict(TypedDict): class ThinkChunk(BaseModel): thinking: List[Thinking] - TYPE: Annotated[ + type: Annotated[ Annotated[Literal["thinking"], AfterValidator(validate_const("thinking"))], pydantic.Field(alias="type"), ] = "thinking" closed: Optional[bool] = None r"""Whether the thinking chunk is closed or not. 
Currently only used for prefixing.""" + + @model_serializer(mode="wrap") + def serialize_model(self, handler): + optional_fields = set(["closed"]) + serialized = handler(self) + m = {} + + for n, f in type(self).model_fields.items(): + k = f.alias or n + val = serialized.get(k) + + if val != UNSET_SENTINEL: + if val is not None or k not in optional_fields: + m[k] = val + + return m + + +try: + ThinkChunk.model_rebuild() +except NameError: + pass diff --git a/packages/azure/src/mistralai/azure/client/models/tool.py b/packages/azure/src/mistralai/azure/client/models/tool.py index 169305bc..87329bdb 100644 --- a/packages/azure/src/mistralai/azure/client/models/tool.py +++ b/packages/azure/src/mistralai/azure/client/models/tool.py @@ -3,7 +3,8 @@ from __future__ import annotations from .function import Function, FunctionTypedDict from .tooltypes import ToolTypes -from mistralai.azure.client.types import BaseModel +from mistralai.azure.client.types import BaseModel, UNSET_SENTINEL +from pydantic import model_serializer from typing import Optional from typing_extensions import NotRequired, TypedDict @@ -17,3 +18,19 @@ class Tool(BaseModel): function: Function type: Optional[ToolTypes] = None + + @model_serializer(mode="wrap") + def serialize_model(self, handler): + optional_fields = set(["type"]) + serialized = handler(self) + m = {} + + for n, f in type(self).model_fields.items(): + k = f.alias or n + val = serialized.get(k) + + if val != UNSET_SENTINEL: + if val is not None or k not in optional_fields: + m[k] = val + + return m diff --git a/packages/azure/src/mistralai/azure/client/models/toolcall.py b/packages/azure/src/mistralai/azure/client/models/toolcall.py index a589b1b3..ada1ea65 100644 --- a/packages/azure/src/mistralai/azure/client/models/toolcall.py +++ b/packages/azure/src/mistralai/azure/client/models/toolcall.py @@ -3,7 +3,8 @@ from __future__ import annotations from .functioncall import FunctionCall, FunctionCallTypedDict from .tooltypes import ToolTypes 
-from mistralai.azure.client.types import BaseModel +from mistralai.azure.client.types import BaseModel, UNSET_SENTINEL +from pydantic import model_serializer from typing import Optional from typing_extensions import NotRequired, TypedDict @@ -23,3 +24,19 @@ class ToolCall(BaseModel): type: Optional[ToolTypes] = None index: Optional[int] = 0 + + @model_serializer(mode="wrap") + def serialize_model(self, handler): + optional_fields = set(["id", "type", "index"]) + serialized = handler(self) + m = {} + + for n, f in type(self).model_fields.items(): + k = f.alias or n + val = serialized.get(k) + + if val != UNSET_SENTINEL: + if val is not None or k not in optional_fields: + m[k] = val + + return m diff --git a/packages/azure/src/mistralai/azure/client/models/toolchoice.py b/packages/azure/src/mistralai/azure/client/models/toolchoice.py index 1f623222..ddb9e141 100644 --- a/packages/azure/src/mistralai/azure/client/models/toolchoice.py +++ b/packages/azure/src/mistralai/azure/client/models/toolchoice.py @@ -3,7 +3,8 @@ from __future__ import annotations from .functionname import FunctionName, FunctionNameTypedDict from .tooltypes import ToolTypes -from mistralai.azure.client.types import BaseModel +from mistralai.azure.client.types import BaseModel, UNSET_SENTINEL +from pydantic import model_serializer from typing import Optional from typing_extensions import NotRequired, TypedDict @@ -23,3 +24,19 @@ class ToolChoice(BaseModel): r"""this restriction of `Function` is used to select a specific function to call""" type: Optional[ToolTypes] = None + + @model_serializer(mode="wrap") + def serialize_model(self, handler): + optional_fields = set(["type"]) + serialized = handler(self) + m = {} + + for n, f in type(self).model_fields.items(): + k = f.alias or n + val = serialized.get(k) + + if val != UNSET_SENTINEL: + if val is not None or k not in optional_fields: + m[k] = val + + return m diff --git a/packages/azure/src/mistralai/azure/client/models/toolmessage.py 
b/packages/azure/src/mistralai/azure/client/models/toolmessage.py index a73fd6bf..670210de 100644 --- a/packages/azure/src/mistralai/azure/client/models/toolmessage.py +++ b/packages/azure/src/mistralai/azure/client/models/toolmessage.py @@ -35,7 +35,7 @@ class ToolMessageTypedDict(TypedDict): class ToolMessage(BaseModel): content: Nullable[ToolMessageContent] - ROLE: Annotated[ + role: Annotated[ Annotated[Literal["tool"], AfterValidator(validate_const("tool"))], pydantic.Field(alias="role"), ] = "tool" @@ -46,30 +46,31 @@ class ToolMessage(BaseModel): @model_serializer(mode="wrap") def serialize_model(self, handler): - optional_fields = ["tool_call_id", "name"] - nullable_fields = ["content", "tool_call_id", "name"] - null_default_fields = [] - + optional_fields = set(["tool_call_id", "name"]) + nullable_fields = set(["content", "tool_call_id", "name"]) serialized = handler(self) - m = {} for n, f in type(self).model_fields.items(): k = f.alias or n val = serialized.get(k) - serialized.pop(k, None) + is_nullable_and_explicitly_set = ( + k in nullable_fields + and (self.__pydantic_fields_set__.intersection({n})) # pylint: disable=no-member + ) + + if val != UNSET_SENTINEL: + if ( + val is not None + or k not in optional_fields + or is_nullable_and_explicitly_set + ): + m[k] = val - optional_nullable = k in optional_fields and k in nullable_fields - is_set = ( - self.__pydantic_fields_set__.intersection({n}) - or k in null_default_fields - ) # pylint: disable=no-member + return m - if val is not None and val != UNSET_SENTINEL: - m[k] = val - elif val != UNSET_SENTINEL and ( - not k in optional_fields or (optional_nullable and is_set) - ): - m[k] = val - return m +try: + ToolMessage.model_rebuild() +except NameError: + pass diff --git a/packages/azure/src/mistralai/azure/client/models/usageinfo.py b/packages/azure/src/mistralai/azure/client/models/usageinfo.py index 19a6b09f..0f04c87c 100644 --- a/packages/azure/src/mistralai/azure/client/models/usageinfo.py +++ 
b/packages/azure/src/mistralai/azure/client/models/usageinfo.py @@ -45,37 +45,34 @@ def additional_properties(self, value): @model_serializer(mode="wrap") def serialize_model(self, handler): - optional_fields = [ - "prompt_tokens", - "completion_tokens", - "total_tokens", - "prompt_audio_seconds", - ] - nullable_fields = ["prompt_audio_seconds"] - null_default_fields = [] - + optional_fields = set( + [ + "prompt_tokens", + "completion_tokens", + "total_tokens", + "prompt_audio_seconds", + ] + ) + nullable_fields = set(["prompt_audio_seconds"]) serialized = handler(self) - m = {} for n, f in type(self).model_fields.items(): k = f.alias or n val = serialized.get(k) serialized.pop(k, None) - - optional_nullable = k in optional_fields and k in nullable_fields - is_set = ( - self.__pydantic_fields_set__.intersection({n}) - or k in null_default_fields - ) # pylint: disable=no-member - - if val is not None and val != UNSET_SENTINEL: - m[k] = val - elif val != UNSET_SENTINEL and ( - not k in optional_fields or (optional_nullable and is_set) - ): - m[k] = val - + is_nullable_and_explicitly_set = ( + k in nullable_fields + and (self.__pydantic_fields_set__.intersection({n})) # pylint: disable=no-member + ) + + if val != UNSET_SENTINEL: + if ( + val is not None + or k not in optional_fields + or is_nullable_and_explicitly_set + ): + m[k] = val for k, v in serialized.items(): m[k] = v diff --git a/packages/azure/src/mistralai/azure/client/models/usermessage.py b/packages/azure/src/mistralai/azure/client/models/usermessage.py index 96439c64..549b01ca 100644 --- a/packages/azure/src/mistralai/azure/client/models/usermessage.py +++ b/packages/azure/src/mistralai/azure/client/models/usermessage.py @@ -27,37 +27,27 @@ class UserMessageTypedDict(TypedDict): class UserMessage(BaseModel): content: Nullable[UserMessageContent] - ROLE: Annotated[ + role: Annotated[ Annotated[Literal["user"], AfterValidator(validate_const("user"))], pydantic.Field(alias="role"), ] = "user" 
@model_serializer(mode="wrap") def serialize_model(self, handler): - optional_fields = [] - nullable_fields = ["content"] - null_default_fields = [] - serialized = handler(self) - m = {} for n, f in type(self).model_fields.items(): k = f.alias or n val = serialized.get(k) - serialized.pop(k, None) - - optional_nullable = k in optional_fields and k in nullable_fields - is_set = ( - self.__pydantic_fields_set__.intersection({n}) - or k in null_default_fields - ) # pylint: disable=no-member - if val is not None and val != UNSET_SENTINEL: - m[k] = val - elif val != UNSET_SENTINEL and ( - not k in optional_fields or (optional_nullable and is_set) - ): + if val != UNSET_SENTINEL: m[k] = val return m + + +try: + UserMessage.model_rebuild() +except NameError: + pass diff --git a/packages/azure/src/mistralai/azure/client/ocr.py b/packages/azure/src/mistralai/azure/client/ocr.py index 098e764b..b9270f6a 100644 --- a/packages/azure/src/mistralai/azure/client/ocr.py +++ b/packages/azure/src/mistralai/azure/client/ocr.py @@ -1,7 +1,7 @@ """Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" from .basesdk import BaseSDK -from mistralai.azure.client import models, utils +from mistralai.azure.client import errors, models, utils from mistralai.azure.client._hooks import HookContext from mistralai.azure.client.types import Nullable, OptionalNullable, UNSET from mistralai.azure.client.utils.unmarshal_json_response import unmarshal_json_response @@ -130,17 +130,17 @@ def process( return unmarshal_json_response(models.OCRResponse, http_res) if utils.match_response(http_res, "422", "application/json"): response_data = unmarshal_json_response( - models.HTTPValidationErrorData, http_res + errors.HTTPValidationErrorData, http_res ) - raise models.HTTPValidationError(response_data, http_res) + raise errors.HTTPValidationError(response_data, http_res) if utils.match_response(http_res, "4XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise models.SDKError("Unexpected response received", http_res) + raise errors.SDKError("Unexpected response received", http_res) async def process_async( self, @@ -263,14 +263,14 @@ async def process_async( return unmarshal_json_response(models.OCRResponse, http_res) if utils.match_response(http_res, "422", "application/json"): response_data = unmarshal_json_response( - models.HTTPValidationErrorData, http_res + errors.HTTPValidationErrorData, http_res ) - raise models.HTTPValidationError(response_data, http_res) + raise errors.HTTPValidationError(response_data, http_res) if utils.match_response(http_res, "4XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, 
http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise models.SDKError("Unexpected response received", http_res) + raise errors.SDKError("Unexpected response received", http_res) diff --git a/packages/azure/src/mistralai/azure/client/utils/__init__.py b/packages/azure/src/mistralai/azure/client/utils/__init__.py index 05f26ade..b488c2df 100644 --- a/packages/azure/src/mistralai/azure/client/utils/__init__.py +++ b/packages/azure/src/mistralai/azure/client/utils/__init__.py @@ -1,14 +1,23 @@ """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" -from typing import TYPE_CHECKING -from importlib import import_module -import builtins -import sys +from typing import Any, TYPE_CHECKING, Callable, TypeVar +import asyncio + +from .dynamic_imports import lazy_getattr, lazy_dir + +_T = TypeVar("_T") + + +async def run_sync_in_thread(func: Callable[..., _T], *args) -> _T: + """Run a synchronous function in a thread pool to avoid blocking the event loop.""" + return await asyncio.to_thread(func, *args) + if TYPE_CHECKING: from .annotations import get_discriminator from .datetimes import parse_datetime from .enums import OpenEnumMeta + from .unions import parse_open_union from .headers import get_headers, get_response_headers from .metadata import ( FieldMetadata, @@ -76,6 +85,7 @@ "match_response", "MultipartFormMetadata", "OpenEnumMeta", + "parse_open_union", "PathParamMetadata", "QueryParamMetadata", "remove_suffix", @@ -128,6 +138,7 @@ "match_response": ".values", "MultipartFormMetadata": ".metadata", "OpenEnumMeta": ".enums", + "parse_open_union": ".unions", "PathParamMetadata": ".metadata", "QueryParamMetadata": ".metadata", "remove_suffix": ".url", @@ -157,38 +168,11 @@ } 
-def dynamic_import(modname, retries=3): - for attempt in range(retries): - try: - return import_module(modname, __package__) - except KeyError: - # Clear any half-initialized module and retry - sys.modules.pop(modname, None) - if attempt == retries - 1: - break - raise KeyError(f"Failed to import module '{modname}' after {retries} attempts") - - -def __getattr__(attr_name: str) -> object: - module_name = _dynamic_imports.get(attr_name) - if module_name is None: - raise AttributeError( - f"no {attr_name} found in _dynamic_imports, module name -> {__name__} " - ) - - try: - module = dynamic_import(module_name) - return getattr(module, attr_name) - except ImportError as e: - raise ImportError( - f"Failed to import {attr_name} from {module_name}: {e}" - ) from e - except AttributeError as e: - raise AttributeError( - f"Failed to get {attr_name} from {module_name}: {e}" - ) from e +def __getattr__(attr_name: str) -> Any: + return lazy_getattr( + attr_name, package=__package__, dynamic_imports=_dynamic_imports + ) def __dir__(): - lazy_attrs = builtins.list(_dynamic_imports.keys()) - return builtins.sorted(lazy_attrs) + return lazy_dir(dynamic_imports=_dynamic_imports) diff --git a/packages/azure/src/mistralai/azure/client/utils/dynamic_imports.py b/packages/azure/src/mistralai/azure/client/utils/dynamic_imports.py new file mode 100644 index 00000000..673edf82 --- /dev/null +++ b/packages/azure/src/mistralai/azure/client/utils/dynamic_imports.py @@ -0,0 +1,54 @@ +"""Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" + +from importlib import import_module +import builtins +import sys + + +def dynamic_import(package, modname, retries=3): + """Import a module relative to package, retrying on KeyError from half-initialized modules.""" + for attempt in range(retries): + try: + return import_module(modname, package) + except KeyError: + sys.modules.pop(modname, None) + if attempt == retries - 1: + break + raise KeyError(f"Failed to import module '{modname}' after {retries} attempts") + + +def lazy_getattr(attr_name, *, package, dynamic_imports, sub_packages=None): + """Module-level __getattr__ that lazily loads from a dynamic_imports mapping. + + Args: + attr_name: The attribute being looked up. + package: The caller's __package__ (for relative imports). + dynamic_imports: Dict mapping attribute names to relative module paths. + sub_packages: Optional list of subpackage names to lazy-load. + """ + module_name = dynamic_imports.get(attr_name) + if module_name is not None: + try: + module = dynamic_import(package, module_name) + return getattr(module, attr_name) + except ImportError as e: + raise ImportError( + f"Failed to import {attr_name} from {module_name}: {e}" + ) from e + except AttributeError as e: + raise AttributeError( + f"Failed to get {attr_name} from {module_name}: {e}" + ) from e + + if sub_packages and attr_name in sub_packages: + return import_module(f".{attr_name}", package) + + raise AttributeError(f"module '{package}' has no attribute '{attr_name}'") + + +def lazy_dir(*, dynamic_imports, sub_packages=None): + """Module-level __dir__ that lists lazily-loadable attributes.""" + lazy_attrs = builtins.list(dynamic_imports.keys()) + if sub_packages: + lazy_attrs.extend(sub_packages) + return builtins.sorted(lazy_attrs) diff --git a/packages/azure/src/mistralai/azure/client/utils/eventstreaming.py b/packages/azure/src/mistralai/azure/client/utils/eventstreaming.py index 0969899b..f2052fc2 100644 --- 
a/packages/azure/src/mistralai/azure/client/utils/eventstreaming.py +++ b/packages/azure/src/mistralai/azure/client/utils/eventstreaming.py @@ -2,7 +2,9 @@ import re import json +from dataclasses import dataclass, asdict from typing import ( + Any, Callable, Generic, TypeVar, @@ -22,6 +24,7 @@ class EventStream(Generic[T]): client_ref: Optional[object] response: httpx.Response generator: Generator[T, None, None] + _closed: bool def __init__( self, @@ -33,17 +36,21 @@ def __init__( self.response = response self.generator = stream_events(response, decoder, sentinel) self.client_ref = client_ref + self._closed = False def __iter__(self): return self def __next__(self): + if self._closed: + raise StopIteration return next(self.generator) def __enter__(self): return self def __exit__(self, exc_type, exc_val, exc_tb): + self._closed = True self.response.close() @@ -53,6 +60,7 @@ class EventStreamAsync(Generic[T]): client_ref: Optional[object] response: httpx.Response generator: AsyncGenerator[T, None] + _closed: bool def __init__( self, @@ -64,33 +72,45 @@ def __init__( self.response = response self.generator = stream_events_async(response, decoder, sentinel) self.client_ref = client_ref + self._closed = False def __aiter__(self): return self async def __anext__(self): + if self._closed: + raise StopAsyncIteration return await self.generator.__anext__() async def __aenter__(self): return self async def __aexit__(self, exc_type, exc_val, exc_tb): + self._closed = True await self.response.aclose() +@dataclass class ServerEvent: id: Optional[str] = None event: Optional[str] = None - data: Optional[str] = None + data: Any = None retry: Optional[int] = None MESSAGE_BOUNDARIES = [ b"\r\n\r\n", - b"\n\n", + b"\r\n\r", + b"\r\n\n", + b"\r\r\n", + b"\n\r\n", b"\r\r", + b"\n\r", + b"\n\n", ] +UTF8_BOM = b"\xef\xbb\xbf" + async def stream_events_async( response: httpx.Response, @@ -99,14 +119,10 @@ async def stream_events_async( ) -> AsyncGenerator[T, None]: buffer = bytearray() 
position = 0 - discard = False + event_id: Optional[str] = None async for chunk in response.aiter_bytes(): - # We've encountered the sentinel value and should no longer process - # incoming data. Instead we throw new data away until the server closes - # the connection. - if discard: - continue - + if len(buffer) == 0 and chunk.startswith(UTF8_BOM): + chunk = chunk[len(UTF8_BOM) :] buffer += chunk for i in range(position, len(buffer)): char = buffer[i : i + 1] @@ -121,15 +137,22 @@ async def stream_events_async( block = buffer[position:i] position = i + len(seq) - event, discard = _parse_event(block, decoder, sentinel) + event, discard, event_id = _parse_event( + raw=block, decoder=decoder, sentinel=sentinel, event_id=event_id + ) if event is not None: yield event + if discard: + await response.aclose() + return if position > 0: buffer = buffer[position:] position = 0 - event, discard = _parse_event(buffer, decoder, sentinel) + event, discard, _ = _parse_event( + raw=buffer, decoder=decoder, sentinel=sentinel, event_id=event_id + ) if event is not None: yield event @@ -141,14 +164,10 @@ def stream_events( ) -> Generator[T, None, None]: buffer = bytearray() position = 0 - discard = False + event_id: Optional[str] = None for chunk in response.iter_bytes(): - # We've encountered the sentinel value and should no longer process - # incoming data. Instead we throw new data away until the server closes - # the connection. 
- if discard: - continue - + if len(buffer) == 0 and chunk.startswith(UTF8_BOM): + chunk = chunk[len(UTF8_BOM) :] buffer += chunk for i in range(position, len(buffer)): char = buffer[i : i + 1] @@ -163,22 +182,33 @@ def stream_events( block = buffer[position:i] position = i + len(seq) - event, discard = _parse_event(block, decoder, sentinel) + event, discard, event_id = _parse_event( + raw=block, decoder=decoder, sentinel=sentinel, event_id=event_id + ) if event is not None: yield event + if discard: + response.close() + return if position > 0: buffer = buffer[position:] position = 0 - event, discard = _parse_event(buffer, decoder, sentinel) + event, discard, _ = _parse_event( + raw=buffer, decoder=decoder, sentinel=sentinel, event_id=event_id + ) if event is not None: yield event def _parse_event( - raw: bytearray, decoder: Callable[[str], T], sentinel: Optional[str] = None -) -> Tuple[Optional[T], bool]: + *, + raw: bytearray, + decoder: Callable[[str], T], + sentinel: Optional[str] = None, + event_id: Optional[str] = None, +) -> Tuple[Optional[T], bool, Optional[str]]: block = raw.decode() lines = re.split(r"\r?\n|\r", block) publish = False @@ -189,13 +219,16 @@ def _parse_event( continue delim = line.find(":") - if delim <= 0: + if delim == 0: continue - field = line[0:delim] - value = line[delim + 1 :] if delim < len(line) - 1 else "" - if len(value) and value[0] == " ": - value = value[1:] + field = line + value = "" + if delim > 0: + field = line[0:delim] + value = line[delim + 1 :] if delim < len(line) - 1 else "" + if len(value) and value[0] == " ": + value = value[1:] if field == "event": event.event = value @@ -204,37 +237,36 @@ def _parse_event( data += value + "\n" publish = True elif field == "id": - event.id = value publish = True + if "\x00" not in value: + event_id = value elif field == "retry": - event.retry = int(value) if value.isdigit() else None + if value.isdigit(): + event.retry = int(value) publish = True + event.id = event_id + if 
sentinel and data == f"{sentinel}\n": - return None, True + return None, True, event_id if data: data = data[:-1] - event.data = data - - data_is_primitive = ( - data.isnumeric() or data == "true" or data == "false" or data == "null" - ) - data_is_json = ( - data.startswith("{") or data.startswith("[") or data.startswith('"') - ) - - if data_is_primitive or data_is_json: - try: - event.data = json.loads(data) - except Exception: - pass + try: + event.data = json.loads(data) + except json.JSONDecodeError: + event.data = data out = None if publish: - out = decoder(json.dumps(event.__dict__)) - - return out, False + out_dict = { + k: v + for k, v in asdict(event).items() + if v is not None or (k == "data" and data) + } + out = decoder(json.dumps(out_dict)) + + return out, False, event_id def _peek_sequence(position: int, buffer: bytearray, sequence: bytes): diff --git a/packages/azure/src/mistralai/azure/client/utils/forms.py b/packages/azure/src/mistralai/azure/client/utils/forms.py index f961e76b..1e550bd5 100644 --- a/packages/azure/src/mistralai/azure/client/utils/forms.py +++ b/packages/azure/src/mistralai/azure/client/utils/forms.py @@ -142,7 +142,7 @@ def serialize_multipart_form( if field_metadata.file: if isinstance(val, List): # Handle array of files - array_field_name = f_name + "[]" + array_field_name = f_name for file_obj in val: if not _is_set(file_obj): continue @@ -185,7 +185,7 @@ def serialize_multipart_form( continue values.append(_val_to_string(value)) - array_field_name = f_name + "[]" + array_field_name = f_name form[array_field_name] = values else: form[f_name] = _val_to_string(val) diff --git a/packages/azure/src/mistralai/azure/client/utils/retries.py b/packages/azure/src/mistralai/azure/client/utils/retries.py index 88a91b10..af07d4e9 100644 --- a/packages/azure/src/mistralai/azure/client/utils/retries.py +++ b/packages/azure/src/mistralai/azure/client/utils/retries.py @@ -144,12 +144,7 @@ def do_request() -> httpx.Response: if res.status_code 
== parsed_code: raise TemporaryError(res) - except httpx.ConnectError as exception: - if retries.config.retry_connection_errors: - raise - - raise PermanentError(exception) from exception - except httpx.TimeoutException as exception: + except (httpx.NetworkError, httpx.TimeoutException) as exception: if retries.config.retry_connection_errors: raise @@ -193,12 +188,7 @@ async def do_request() -> httpx.Response: if res.status_code == parsed_code: raise TemporaryError(res) - except httpx.ConnectError as exception: - if retries.config.retry_connection_errors: - raise - - raise PermanentError(exception) from exception - except httpx.TimeoutException as exception: + except (httpx.NetworkError, httpx.TimeoutException) as exception: if retries.config.retry_connection_errors: raise diff --git a/packages/azure/src/mistralai/azure/client/utils/security.py b/packages/azure/src/mistralai/azure/client/utils/security.py index 295a3f40..17996bd5 100644 --- a/packages/azure/src/mistralai/azure/client/utils/security.py +++ b/packages/azure/src/mistralai/azure/client/utils/security.py @@ -135,6 +135,8 @@ def _parse_security_scheme_value( elif scheme_type == "http": if sub_type == "bearer": headers[header_name] = _apply_bearer(value) + elif sub_type == "basic": + headers[header_name] = value elif sub_type == "custom": return else: diff --git a/packages/azure/src/mistralai/azure/client/utils/unions.py b/packages/azure/src/mistralai/azure/client/utils/unions.py new file mode 100644 index 00000000..a227f4e8 --- /dev/null +++ b/packages/azure/src/mistralai/azure/client/utils/unions.py @@ -0,0 +1,32 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from typing import Any + +from pydantic import BaseModel, TypeAdapter + + +def parse_open_union( + v: Any, + *, + disc_key: str, + variants: dict[str, Any], + unknown_cls: type, + union_name: str, +) -> Any: + """Parse an open discriminated union value with forward-compatibility. 
+ + Known discriminator values are dispatched to their variant types. + Unknown discriminator values produce an instance of the fallback class, + preserving the raw payload for inspection. + """ + if isinstance(v, BaseModel): + return v + if not isinstance(v, dict) or disc_key not in v: + raise ValueError(f"{union_name}: expected object with '{disc_key}' field") + disc = v[disc_key] + variant_cls = variants.get(disc) + if variant_cls is not None: + if isinstance(variant_cls, type) and issubclass(variant_cls, BaseModel): + return variant_cls.model_validate(v) + return TypeAdapter(variant_cls).validate_python(v) + return unknown_cls(raw=v) diff --git a/packages/azure/src/mistralai/azure/client/utils/unmarshal_json_response.py b/packages/azure/src/mistralai/azure/client/utils/unmarshal_json_response.py index 5317ac87..fe0c9b8e 100644 --- a/packages/azure/src/mistralai/azure/client/utils/unmarshal_json_response.py +++ b/packages/azure/src/mistralai/azure/client/utils/unmarshal_json_response.py @@ -5,7 +5,7 @@ import httpx from .serializers import unmarshal_json -from mistralai.azure.client import models +from mistralai.azure.client import errors T = TypeVar("T") @@ -30,7 +30,7 @@ def unmarshal_json_response( try: return unmarshal_json(body, typ) except Exception as e: - raise models.ResponseValidationError( + raise errors.ResponseValidationError( "Response validation failed", http_res, e, diff --git a/packages/gcp/.speakeasy/gen.lock b/packages/gcp/.speakeasy/gen.lock index 8ce6c5ea..517e1a85 100644 --- a/packages/gcp/.speakeasy/gen.lock +++ b/packages/gcp/.speakeasy/gen.lock @@ -3,46 +3,46 @@ id: ec60f2d8-7869-45c1-918e-773d41a8cf74 management: docChecksum: bc4a0ba9c38418d84a6a8a76b503977b docVersion: 1.0.0 - speakeasyVersion: 1.685.0 - generationVersion: 2.794.1 - releaseVersion: 2.0.0a4 - configChecksum: 95fb33ae488fa72fb4ba17c6b93551a9 + speakeasyVersion: 1.729.0 + generationVersion: 2.841.0 + releaseVersion: 2.0.0-a4.1 + configChecksum: 
bfe17061a2e5ac54039980ad7a48fd77 repoURL: https://github.com/mistralai/client-python.git repoSubDirectory: packages/gcp installationURL: https://github.com/mistralai/client-python.git#subdirectory=packages/gcp published: true persistentEdits: - generation_id: 5f09b925-b801-4bf0-bda9-6f9a3212c588 - pristine_commit_hash: 20c7ce96f6a097f98d3367b89a7bea09ba0ded7c - pristine_tree_hash: c30d519719cc0cd17d7bf53ae2c13b1d8b125c5e + generation_id: c7e2e696-b223-4993-a79b-2e6f15242c30 + pristine_commit_hash: 86953bc23bb7fcfc3c2525f79114411bc27e8f75 + pristine_tree_hash: 93675a8857b7519918499101d4a5e30fc7fe2c4a features: python: additionalDependencies: 1.0.0 additionalProperties: 1.0.1 configurableModuleName: 0.2.0 - constsAndDefaults: 1.0.5 - core: 5.23.18 + constsAndDefaults: 1.0.7 + core: 6.0.12 defaultEnabledRetries: 0.2.0 enumUnions: 0.1.0 envVarSecurityUsage: 0.3.2 examples: 3.0.2 flatRequests: 1.0.1 - globalSecurity: 3.0.4 + globalSecurity: 3.0.5 globalSecurityCallbacks: 1.0.0 globalSecurityFlattening: 1.0.0 globalServerURLs: 3.2.0 includes: 3.0.0 methodArguments: 1.0.2 - nameOverrides: 3.0.1 - nullables: 1.0.1 - openEnums: 1.0.2 - responseFormat: 1.0.1 - retries: 3.0.3 - sdkHooks: 1.2.0 - serverEvents: 1.0.11 + nameOverrides: 3.0.3 + nullables: 1.0.2 + openEnums: 1.0.4 + responseFormat: 1.1.0 + retries: 3.0.4 + sdkHooks: 1.2.1 + serverEvents: 1.0.13 serverEventsSentinels: 0.1.0 serverIDs: 3.0.0 - unions: 3.1.1 + unions: 3.1.4 trackedFiles: .gitattributes: id: 24139dae6567 @@ -52,6 +52,10 @@ trackedFiles: id: 89aa447020cd last_write_checksum: sha1:f84632c81029fcdda8c3b0c768d02b836fc80526 pristine_git_object: 8d79f0abb72526f1fb34a4c03e5bba612c6ba2ae + docs/errors/httpvalidationerror.md: + id: 7fe2e5327e07 + last_write_checksum: sha1:277a46811144643262651853dc6176d21b33573e + pristine_git_object: 712a148c3e2305dca4c702851865f9f8c8e674cc docs/models/arguments.md: id: 7ea5e33709a7 last_write_checksum: sha1:09eea126210d7fd0353e60a76bf1dbed173f13ec @@ -74,8 +78,8 @@ 
trackedFiles: pristine_git_object: b2f15ecbe88328de95b4961ddb3940fd8a6ee64b docs/models/chatcompletionrequest.md: id: adffe90369d0 - last_write_checksum: sha1:2bf5152388f18436be4fe1c541b8d423dcae175c - pristine_git_object: 61a25d86e7dc292621f7f6c0f8909137a16b9112 + last_write_checksum: sha1:6374e05aeb66d48137d657acaa89527df2db35c6 + pristine_git_object: 8dbd4a82ad1d7725b9a6ce56daea208ca01b9210 docs/models/chatcompletionrequestmessage.md: id: 3f5e170d418c last_write_checksum: sha1:7921c5a508a9f88adc01caab34e26182b8035607 @@ -94,8 +98,8 @@ trackedFiles: pristine_git_object: a0465ffbfc5558628953e03fbc53b80bbdc8649b docs/models/chatcompletionstreamrequest.md: id: cf8f29558a68 - last_write_checksum: sha1:f30b2a7353e7406eb30af841a1a211ea5cb30cb0 - pristine_git_object: 3e790e7dc7143b0ae287ad2df14ae7e7a4085e3f + last_write_checksum: sha1:e23cf88a5a9b0c99e68d06a8450b8bfb9aee33a2 + pristine_git_object: db76b6c81a71607f94c212a542fe30e082053a90 docs/models/chatcompletionstreamrequestmessage.md: id: 053a98476cd2 last_write_checksum: sha1:8270692463fab1243d9de4bbef7162daa64e52c5 @@ -168,10 +172,6 @@ trackedFiles: id: 4b3bd62c0f26 last_write_checksum: sha1:754fe32bdffe53c1057b302702f5516f4e551cfb pristine_git_object: 87d7b4852de629015166605b273deb9341202dc0 - docs/models/httpvalidationerror.md: - id: a211c095f2ac - last_write_checksum: sha1:277a46811144643262651853dc6176d21b33573e - pristine_git_object: 712a148c3e2305dca4c702851865f9f8c8e674cc docs/models/imagedetail.md: id: f8217529b496 last_write_checksum: sha1:fdf19ac9459f64616240955cb81a84ef03e775c8 @@ -298,8 +298,8 @@ trackedFiles: pristine_git_object: 3e38f1a929f7d6b1d6de74604aa87e3d8f010544 pylintrc: id: 7ce8b9f946e6 - last_write_checksum: sha1:6b615d49741eb9ae16375d3a499767783d1128a1 - pristine_git_object: a8fcb932ba2a01c5e96e3b04c59371e930b75558 + last_write_checksum: sha1:8f871a5aac4b10bff724c9d91b8d7496eb1fbdde + pristine_git_object: 0391ac11bdc5526b697b69d047d568a611ce87d0 scripts/prepare_readme.py: id: e0c5957a6035 
last_write_checksum: sha1:eb988bc0e00ed4bb14e9a3572845af14f06c9b42 @@ -310,8 +310,8 @@ trackedFiles: pristine_git_object: c35748f360329c2bc370e9b189f49b1a360b2c48 src/mistralai/gcp/client/__init__.py: id: 4f63decd432e - last_write_checksum: sha1:36306d1d404b6aeb912d27f1d9c52f098ff7bf9b - pristine_git_object: dd02e42e4cc509dc90e6ae70493054021faa5f9c + last_write_checksum: sha1:da077c0bdfcef64a4a5aea91a17292f72fa2b088 + pristine_git_object: 833c68cd526fe34aab2b7e7c45f974f7f4b9e120 src/mistralai/gcp/client/_hooks/__init__.py: id: adcb191838d1 last_write_checksum: sha1:e3111289afd28ad557c21d9e2f918caabfb7037d @@ -326,84 +326,108 @@ trackedFiles: pristine_git_object: ea95bed210db9180824efddfb1b3e47f5bf96489 src/mistralai/gcp/client/_version.py: id: f87319e32c7b - last_write_checksum: sha1:8c07e6351bf2df8239b3c02db75ee469dba53394 - pristine_git_object: ba48dac120cadd3f586b38659dc04e50838daa11 + last_write_checksum: sha1:85dd6da1d6503d717e8c9bd6d62278b469d3b464 + pristine_git_object: 204c92a656855ad281e86a74467e71ae1b04639f src/mistralai/gcp/client/basesdk.py: id: 4d594572857b - last_write_checksum: sha1:45ed4b6078e01d52d1dcf4bdc5494b700f1a6cde - pristine_git_object: 6f9f5fd9a2cadc8893d6693c1d40a8114c0fdc2a + last_write_checksum: sha1:d8ef9e2f4fa97d402eb9f5472ceb80fb39693991 + pristine_git_object: b3edcb0aca1882d0cbe4d499cfba9cb5464c5b58 src/mistralai/gcp/client/chat.py: id: 4c41f05f786e - last_write_checksum: sha1:a4d5609f51dee25dfc34f83e1eda2888aa01dda6 - pristine_git_object: 78541248204cbd5b92b6d6d362924fcdada8a948 + last_write_checksum: sha1:60b2697e2ecfb62eebed910007e62ab1df565eec + pristine_git_object: 925d69eda2fdac458045cc12327ca72997e07600 + src/mistralai/gcp/client/errors/__init__.py: + id: c51c8ed21629 + last_write_checksum: sha1:29f08ad600a712ff572843a250839ef92efac19b + pristine_git_object: 00c8ee0031486b5416bb6745397c463e1a5dbba6 + src/mistralai/gcp/client/errors/httpvalidationerror.py: + id: b0e25f1c36bd + last_write_checksum: 
sha1:c863914ed6704ee6c3ad99a77d8b1e742de069d0 + pristine_git_object: 598068197b9ed7e7756de01325f7967a719e46ea + src/mistralai/gcp/client/errors/mistralgcperror.py: + id: 9a9cad8f5d36 + last_write_checksum: sha1:7267c829a842a94c5b84ac248a1610ce45f3db4e + pristine_git_object: 9de91bf2a4abf8b0d0922eb6062fe2ab817a8aee + src/mistralai/gcp/client/errors/no_response_error.py: + id: 2d3e5fe56122 + last_write_checksum: sha1:7f326424a7d5ae1bcd5c89a0d6b3dbda9138942f + pristine_git_object: 1deab64bc43e1e65bf3c412d326a4032ce342366 + src/mistralai/gcp/client/errors/responsevalidationerror.py: + id: 98f7bac284be + last_write_checksum: sha1:1b835d2ce8754b22d5fa269077d7a2eec11d7f29 + pristine_git_object: e8bd83c19b0629bb0ddf7a240e9b8371cb33fff3 + src/mistralai/gcp/client/errors/sdkerror.py: + id: c53aee73c8e1 + last_write_checksum: sha1:080933e9f354b675988a132813f23e55f9e5db74 + pristine_git_object: 6980924626fa5fbf67fb62a30fd23d5883dbe650 src/mistralai/gcp/client/fim.py: id: 13d2d208e0ef - last_write_checksum: sha1:e6226c1720effd314afa7b9a21e5ec2347e5a74f - pristine_git_object: e2acacd58c28fa7ea718240b01a3714f7fc0b8f6 + last_write_checksum: sha1:1027165887446ce0764ad542ca52f61b460c71b8 + pristine_git_object: 4202102ae5218784a10ee93ada5a0643d23a1d0c src/mistralai/gcp/client/httpclient.py: id: a53dd7be6a4c last_write_checksum: sha1:5e55338d6ee9f01ab648cad4380201a8a3da7dd7 pristine_git_object: 89560b566073785535643e694c112bedbd3db13d src/mistralai/gcp/client/models/__init__.py: id: d9e976d01972 - last_write_checksum: sha1:f0554ff6b81286615330ffea947e619bc508bf19 - pristine_git_object: fb446c259f4ca1cc97ec64aac197f52b8224a096 + last_write_checksum: sha1:97ddfc7f70abd5e1a0b36be6dce209b69e9d5c73 + pristine_git_object: 575f64040c90152e74954b749ea89bce5a07e02e src/mistralai/gcp/client/models/assistantmessage.py: id: d39c4bdd289e - last_write_checksum: sha1:08fa98315561d5bb2c094bf57e7d66639b86e3ee - pristine_git_object: 7061775b3dbd9be0b978ff2a2cb07e52c01fc80a + last_write_checksum: 
sha1:c813783bcbeec4e40f12e007d1dde4aed8ec71cf + pristine_git_object: 702ac4708abb95fc18d138500b8353715c2dbc98 src/mistralai/gcp/client/models/chatcompletionchoice.py: id: 8e65b56f3e6d last_write_checksum: sha1:e6d1382e9f880b866130d900fd866997aaf80e45 pristine_git_object: ae5a2fbf38afbd86233dcaa8aa1c8441f5ed9eba src/mistralai/gcp/client/models/chatcompletionrequest.py: id: 4694a31c0003 - last_write_checksum: sha1:edb744ec2baca1f9ba6574662fffb36fb7d3faab - pristine_git_object: 1bc039221910bf88396c96affe735c8ac822920b + last_write_checksum: sha1:80fcbbcde773c22c93cf2db63beef2cfe3777497 + pristine_git_object: 8229c5bb13ded84039f3d8ddb95ac0a9c184e1bd src/mistralai/gcp/client/models/chatcompletionresponse.py: id: dd9e4796fca9 last_write_checksum: sha1:76d7257583389ff5021e320a8f9a45a6deb07c7c pristine_git_object: 317c4d84e378c14294d58c5aefd8c55ffe28754a src/mistralai/gcp/client/models/chatcompletionstreamrequest.py: id: 7294862af8ea - last_write_checksum: sha1:75d5bfcc204339b152dc78e33ac449c3aa9b5432 - pristine_git_object: 0a5a0021a4862e7b92a5c31679bf42bfa704d15b + last_write_checksum: sha1:899210f881bdbe0a0d94e29fe7044fabbccc578c + pristine_git_object: 3c228d2e7edf08c36f310e190a8dedc7b4958459 src/mistralai/gcp/client/models/completionchunk.py: id: 6b9ed8c30877 - last_write_checksum: sha1:4afc07c1824d81640f52a5c8bf89fde8893269b9 - pristine_git_object: 9e54cb6dfaccf7f815b40be585e11585cb5fef78 + last_write_checksum: sha1:f1f091e94e3c1c1aefd3c3bb60c8de8236ab0ead + pristine_git_object: a0b1ae2fa3109a2c2b76bbc483b691d88dc9a15c src/mistralai/gcp/client/models/completionevent.py: id: 3f55c4b8fc75 last_write_checksum: sha1:66665d921fd27df6ef0efce996a5446e49b989d8 pristine_git_object: bb1550093ce9adcb9bcd0548b69796e82f4f260b src/mistralai/gcp/client/models/completionresponsestreamchoice.py: id: ad9b98ca7e1c - last_write_checksum: sha1:04d195584fe4ea16544685e9989e5ae35205179a - pristine_git_object: 6f306721fbe47780c778833b80e97ab5d25d8367 + last_write_checksum: 
sha1:c4f9d733461bdb9a0d6c96e82212de7dddc04ffe + pristine_git_object: e58d4c88009ed3696d2a3a57f3796d8fb067019d src/mistralai/gcp/client/models/contentchunk.py: id: 8714d3bf2698 - last_write_checksum: sha1:347f43b4d7dcab18e09e6c3323f745a25ecfb04c - pristine_git_object: 1cd9e502ab7d4860daa79f907beafa71da086ab3 + last_write_checksum: sha1:acab1b53b1d324544c6aa6c4126a3fb5265278d2 + pristine_git_object: 18d481505e17d2125e380d796b0c406b0e66d601 src/mistralai/gcp/client/models/deltamessage.py: id: 404fc85f1a4c - last_write_checksum: sha1:3375624531d12279d225fb07a68e0396483b962f - pristine_git_object: 96923518438137cb729a69149b5b99be49836ad7 + last_write_checksum: sha1:982c2d15a570c7f4d5e1c3b012db46ea3bac609b + pristine_git_object: 63e6a7f3e50c138f235f5a36277aa8668f85cef1 src/mistralai/gcp/client/models/fimcompletionrequest.py: id: 5b79e2595d31 - last_write_checksum: sha1:cc4fa68c60a6a500a9887e47dd2e9220327c6226 - pristine_git_object: f37bbcc3cab020224531da898dd99cc175d49cd9 + last_write_checksum: sha1:80a2e3d5e10c240869cd96c41936d714cf8bf801 + pristine_git_object: e460f76c59315c22c75194936f1f3b232331f83c src/mistralai/gcp/client/models/fimcompletionresponse.py: id: 402f602d29b8 last_write_checksum: sha1:cfe26848c7b14d6e374b7944d7ad44df822990b0 pristine_git_object: 5b80da3f03e4e99dfca971a53af1cf6472c889bb src/mistralai/gcp/client/models/fimcompletionstreamrequest.py: id: 31190cf25070 - last_write_checksum: sha1:720f0a039a62cb508d513475a0e4bad45a9aa03c - pristine_git_object: 8e6102612998bde70d830bb0b8ee3a5e2a4dd01e + last_write_checksum: sha1:a95ab8c20b2fdff48102f08258a556af9f382ffa + pristine_git_object: fffc305499e578f77e42fb7992b59e933ae0ae7c src/mistralai/gcp/client/models/function.py: id: 2285a899b32e - last_write_checksum: sha1:a69ad9c8cd723e78a3949deefe43bcbf57426916 - pristine_git_object: 28577eff06d052aeb58c2795dd0a92ae4f2e7552 + last_write_checksum: sha1:6439f7f781174ae56b2b02ccbb4d02b08d8d5a03 + pristine_git_object: 439e831355444e0f9e82d23636651201f0db4bfc 
src/mistralai/gcp/client/models/functioncall.py: id: 17bb51f08e5f last_write_checksum: sha1:b5fe2f061ea5f47057ee50011babc80de27e0ee6 @@ -412,114 +436,94 @@ trackedFiles: id: 313a6001145f last_write_checksum: sha1:fe1eefaed314efa788bd15beb63bf6b81abb307e pristine_git_object: 585b9e39762e49356823e211ad86f701bca389b8 - src/mistralai/gcp/client/models/httpvalidationerror.py: - id: bdb67f678798 - last_write_checksum: sha1:58b6b7a2b2f8e4f66fc14c38540a26cfd2541a1e - pristine_git_object: 57df72607adc980b061d092f77140c6dbd36ecec src/mistralai/gcp/client/models/imagedetail.py: id: a28b2f3e2cb5 last_write_checksum: sha1:a4874529961952019eaa86a2fa0989626f537a4c pristine_git_object: 68ed76080716eb1424b13f182479f57e51a4fabf src/mistralai/gcp/client/models/imageurl.py: id: 4e330f3eae74 - last_write_checksum: sha1:3c5d70c0698b1b4b9c99087241227bab3dc0cdbf - pristine_git_object: d4f298f12d8095590cded5714091596b505c59b1 + last_write_checksum: sha1:6c0bee7d7c765fb2611131c7d270041671b428b8 + pristine_git_object: 903d0a1a45eeb7c5e8cde80f624b6e039de1f4cc src/mistralai/gcp/client/models/imageurlchunk.py: id: e68a4a393e9b - last_write_checksum: sha1:2eb2c8a205e5f8b320e2f597075cad9e5e27475b - pristine_git_object: fc5284c102c17a33c1ba6029c87515d509cd014b + last_write_checksum: sha1:eae1d0e69a90b2f7513492e4cd0ed68d647f0b5d + pristine_git_object: 4bec0eec882c1eeee8a80f663ff7d686ca677ea0 src/mistralai/gcp/client/models/jsonschema.py: id: 39c6e7d412a0 - last_write_checksum: sha1:29ba87457959588ff7d8188ae2382fb88740151d - pristine_git_object: 443c429dd1461d7a6817335626cd585577c5bffe - src/mistralai/gcp/client/models/mistralgcperror.py: - id: 278d296220ff - last_write_checksum: sha1:7267c829a842a94c5b84ac248a1610ce45f3db4e - pristine_git_object: 9de91bf2a4abf8b0d0922eb6062fe2ab817a8aee + last_write_checksum: sha1:19b34a5e3f5c00d1a1b96f91a6e02f5ad12240c7 + pristine_git_object: 684ac09f0460bef1f26bf0030b79bbc7141ab99b src/mistralai/gcp/client/models/mistralpromptmode.py: id: 8be4a4a683e4 
last_write_checksum: sha1:c958567e95490abf3941fde69be69733e8afb90e pristine_git_object: c765e4f1a0b86735255771231377f13d62f3d7a6 - src/mistralai/gcp/client/models/no_response_error.py: - id: 2a7fa173594b - last_write_checksum: sha1:7f326424a7d5ae1bcd5c89a0d6b3dbda9138942f - pristine_git_object: 1deab64bc43e1e65bf3c412d326a4032ce342366 src/mistralai/gcp/client/models/prediction.py: id: 7a5463285bc8 - last_write_checksum: sha1:1d1e81082d1c2bfd613f0bc00f7173995ad67c0c - pristine_git_object: f53579edc665dd7fc1cc2497b0cd05b69e541cd8 + last_write_checksum: sha1:67c4a9b06d3e98552409a26960e0afd64f829b53 + pristine_git_object: 2e325289fd6c2a987ad270fd808f7b9a3f423440 src/mistralai/gcp/client/models/referencechunk.py: id: 523e477f8725 - last_write_checksum: sha1:d29c5fc1d8b6850fdeb3abc7f83185de92571b23 - pristine_git_object: 274ea7f7b142714d96040428fe7b87eeb48432cb + last_write_checksum: sha1:aade1dc05c2a2672630eb17626e4f49367d6bfe6 + pristine_git_object: 261c4755641093a38f97b17dce3a387623e69ead src/mistralai/gcp/client/models/responseformat.py: id: 06774bb65b42 - last_write_checksum: sha1:a52a60dc45c0b0939b99754d6c0c603ef2f737d3 - pristine_git_object: 34ae6b039a6c83c603fc6d47f6b2f233ec6c817a + last_write_checksum: sha1:7e64de46ef34718003cf0d198868a193f2122178 + pristine_git_object: f3aa9930e0f8a009dac628300d66c6209a538031 src/mistralai/gcp/client/models/responseformats.py: id: 18112ad0f6db last_write_checksum: sha1:a212e85d286b5b49219f57d071a2232ff8b5263b pristine_git_object: cbf83ce7b54ff8634f741334831807bfb5c98991 - src/mistralai/gcp/client/models/responsevalidationerror.py: - id: b90c1c09ac00 - last_write_checksum: sha1:e4321c1141ba7b1f6a8c217124e02ea0c70d9ad1 - pristine_git_object: 0e86ea6cb79fd4598d527dfef403ba66d435d3bb - src/mistralai/gcp/client/models/sdkerror.py: - id: a7cf4fa8974b - last_write_checksum: sha1:a3b60234deceb7fbcb57926c265e02e9fefc0835 - pristine_git_object: 00bc1d99353e7e2415d92c3e906c2c09712e5a64 src/mistralai/gcp/client/models/security.py: id: 
7e13bda8273b last_write_checksum: sha1:7086e929823d4eefe80cc279b605adfc8bbb08aa pristine_git_object: 10a469b54d5e03873fb7d7d98627f2376c93d484 src/mistralai/gcp/client/models/systemmessage.py: id: 6537664d2d1b - last_write_checksum: sha1:e7f8dc73154c6985fcdbb77259df9bbc4745f976 - pristine_git_object: a7d695a7791eb5e97cd8f74e81c475c78e4b1a67 + last_write_checksum: sha1:779cb07cfd63ebe9eec496177cf1a8f5c077e417 + pristine_git_object: b3795c4bf4e97853979e0042cf4bd151d60ef974 src/mistralai/gcp/client/models/systemmessagecontentchunks.py: id: e120a6469c89 - last_write_checksum: sha1:55529f2f29ba3087fbf117dbbe64e1dda92b2958 - pristine_git_object: 225f38b712f5f3c7abfd526cc8c0386687814f36 + last_write_checksum: sha1:d1f96498cbb540b91425e70ffa33892ff4d1c8cd + pristine_git_object: 8de71c909eda2ed0166a6be8f8ee029956e5766b src/mistralai/gcp/client/models/textchunk.py: id: a134f120d4dc - last_write_checksum: sha1:9f46381e01f235560017ea80fbc85210eb625a99 - pristine_git_object: 77576c9fd87f0861bf6a3496aeae7e8bb8dc986a + last_write_checksum: sha1:1ccc7d232136d6278d670542d192f36f46862df1 + pristine_git_object: 690322725c0f852a005d08c5b722c41709868b22 src/mistralai/gcp/client/models/thinkchunk.py: id: 59a1d1ef2020 - last_write_checksum: sha1:9fcccb19d87bc41f771cae710eeb8f28c229070d - pristine_git_object: b65fffb21d5cb060acaa648a70e337a43595cd32 + last_write_checksum: sha1:066eeb10de301264e601a9ec64d21e1cc13b0c20 + pristine_git_object: 33ec83949499d99a28c55bb20429ab948bb5b1e8 src/mistralai/gcp/client/models/tool.py: id: 4b27d45e56ad - last_write_checksum: sha1:6d139575b740ea1f9f68a73b7bc2c95c30a10345 - pristine_git_object: d09c68542f2cb1f3bae0ffc7a7b163ad08a8e973 + last_write_checksum: sha1:cb0d879a55218fd7753bdd005be8a155982feb8f + pristine_git_object: 670aa81f8767e7c079105cf5995225168b4d6eb6 src/mistralai/gcp/client/models/toolcall.py: id: e6c25869a579 - last_write_checksum: sha1:5acf0eca8b1f4c459c6d8cadbbbd90605201ddc0 - pristine_git_object: a1edf3370426957980ff212367d56909ea8fa548 
+ last_write_checksum: sha1:f88e69a8e352025ca4b6897f6c16e1f7e4cd7264 + pristine_git_object: 3ea8e283c8f695bcc1fbc734b0074d37c2efeac8 src/mistralai/gcp/client/models/toolchoice.py: id: cb13a9f64c92 - last_write_checksum: sha1:3ad6b48b24b39609e86229193ad18d84b1b3c818 - pristine_git_object: de3828dac8bc23e32b9f9434adccc770b5ce1212 + last_write_checksum: sha1:71be72b1aae19aef1f8a461c89b71ad6daa009b7 + pristine_git_object: 6e795fd72792f740c8aa5b4da7d1f516018f2c2e src/mistralai/gcp/client/models/toolchoiceenum.py: id: d62e9c92d93c last_write_checksum: sha1:3dbba9a58c5569aafe115f3f7713a52b01ad8620 pristine_git_object: 01f6f677b379f9e3c99db9d1ad248cb0033a2804 src/mistralai/gcp/client/models/toolmessage.py: id: b3774786c2e9 - last_write_checksum: sha1:ef21eb555f41ec70010dbcea1a155af988936061 - pristine_git_object: 65b1d9d62d37361a06b3fd3ee1790eb3a976a94f + last_write_checksum: sha1:3d414da8132467d1472ebe485802ffc78eb6f7e4 + pristine_git_object: ce160391f37ce3568daf2877f8dc1aa0f3694821 src/mistralai/gcp/client/models/tooltypes.py: id: 5926c64f5229 last_write_checksum: sha1:ffd576511eed9f823c3d67df9fc5574d8d53c54b pristine_git_object: fd1aa13d7b8c5d9bdb0922e04b8bd653ff843f60 src/mistralai/gcp/client/models/usageinfo.py: id: 3aab1af66cff - last_write_checksum: sha1:47c6311bc1db47849a72c8e1bcc64dac9cec637e - pristine_git_object: 9b7207b10ea9d46d8216c104c45be1a52fb093d9 + last_write_checksum: sha1:c0c949ac48ed35efe1e8fbf820b8e390edd9c3ce + pristine_git_object: cb6feb6e8d173d39b828d8f5b38af75173b4f7f2 src/mistralai/gcp/client/models/usermessage.py: id: 9cfa7260463e - last_write_checksum: sha1:580acf868a3d180eef34b2af9c2d20f78e4fb693 - pristine_git_object: c083e16d4aa536beec9f9e1151ebbe8c1797798c + last_write_checksum: sha1:780984241b84a7dfe1f6ad6eccace1204bfec8bd + pristine_git_object: e237e900421a9e65fd15aede29ade0e510b189f6 src/mistralai/gcp/client/models/validationerror.py: id: 6b4f4910ea9c last_write_checksum: sha1:2792fd656f55519902f37670fb9fb3b43b4aa016 @@ -542,8 +546,8 @@ 
trackedFiles: pristine_git_object: a9a640a1a7048736383f96c67c6290c86bf536ee src/mistralai/gcp/client/utils/__init__.py: id: a30c8ff6dcff - last_write_checksum: sha1:887f56a717845fab7445cc368d2a17d850c3565a - pristine_git_object: 05f26ade57efb8c54a774fbcb939fb1a7dc655ce + last_write_checksum: sha1:3ad22a588864c93bd3a16605f669955b5f3b8053 + pristine_git_object: b488c2df1390b22be3050eee72832a91c76d5385 src/mistralai/gcp/client/utils/annotations.py: id: 9b2cd4ffc6e9 last_write_checksum: sha1:a4824ad65f730303e4e1e3ec1febf87b4eb46dbc @@ -552,18 +556,22 @@ trackedFiles: id: dd1f0f91ea9d last_write_checksum: sha1:c721e4123000e7dc61ec52b28a739439d9e17341 pristine_git_object: a6c52cd61bbe2d459046c940ce5e8c469f2f0664 + src/mistralai/gcp/client/utils/dynamic_imports.py: + id: 0091051cb000 + last_write_checksum: sha1:a1940c63feb8eddfd8026de53384baf5056d5dcc + pristine_git_object: 673edf82a97d0fea7295625d3e092ea369a36b79 src/mistralai/gcp/client/utils/enums.py: id: 2341407d5443 last_write_checksum: sha1:bc8c3c1285ae09ba8a094ee5c3d9c7f41fa1284d pristine_git_object: 3324e1bc2668c54c4d5f5a1a845675319757a828 src/mistralai/gcp/client/utils/eventstreaming.py: id: bb66f0c3e0dc - last_write_checksum: sha1:bababae5d54b7efc360db701daa49e18a92c2f3b - pristine_git_object: 0969899bfc491e5e408d05643525f347ea95e4fc + last_write_checksum: sha1:ffa870a25a7e4e2015bfd7a467ccd3aa1de97f0e + pristine_git_object: f2052fc22d9fd6c663ba3dce019fe234ca37108b src/mistralai/gcp/client/utils/forms.py: id: ebf34781d6bd - last_write_checksum: sha1:15fa7e9ab1611e062a9984cf06cb20969713d295 - pristine_git_object: f961e76beaf0a8b1fe0dda44754a74eebd3608e7 + last_write_checksum: sha1:0ca31459b99f761fcc6d0557a0a38daac4ad50f4 + pristine_git_object: 1e550bd5c2c35d977ddc10f49d77c23cb12c158d src/mistralai/gcp/client/utils/headers.py: id: 4c369582903e last_write_checksum: sha1:7c6df233ee006332b566a8afa9ce9a245941d935 @@ -586,20 +594,24 @@ trackedFiles: pristine_git_object: 1de32b6d26f46590232f398fdba6ce0072f1659c 
src/mistralai/gcp/client/utils/retries.py: id: 542ebd75b79b - last_write_checksum: sha1:5b97ac4f59357d70c2529975d50364c88bcad607 - pristine_git_object: 88a91b10cd2076b4a2c6cff2ac6bfaa5e3c5ad13 + last_write_checksum: sha1:471372f5c5d1dd5583239c9cf3c75f1b636e5d87 + pristine_git_object: af07d4e941007af4213c5ec9047ef8a2fca04e5e src/mistralai/gcp/client/utils/security.py: id: 5273152365f4 - last_write_checksum: sha1:a17130ace2c0db6394f38dd941ad2b700cc755c8 - pristine_git_object: 295a3f40031dbb40073ad227fd4a355660f97ab2 + last_write_checksum: sha1:435dd8b180cefcd733e635b9fa45512da091d9c0 + pristine_git_object: 17996bd54b8624009802fbbdf30bcb4225b8dfed src/mistralai/gcp/client/utils/serializers.py: id: a7836e553d41 last_write_checksum: sha1:ce1d8d7f500a9ccba0aeca5057cee9c271f4dfd7 pristine_git_object: 14321eb479de81d0d9580ec8291e0ff91bf29e57 + src/mistralai/gcp/client/utils/unions.py: + id: 8abba1cf1b6d + last_write_checksum: sha1:6e38049f323e0b5fb4bd0e88ab51ec447197ccb0 + pristine_git_object: a227f4e87be22fce682fcae5813b71835199ec5e src/mistralai/gcp/client/utils/unmarshal_json_response.py: id: d972d22cf934 - last_write_checksum: sha1:a68b9e491188e6c1956a749530eac3c7dc8004e7 - pristine_git_object: 83e8275e59adf51fb01a0579ae26627ee29fee49 + last_write_checksum: sha1:5c75fb4ee04ae80a350ceb96abf4e1fdb255ee6c + pristine_git_object: ead3e5a00171b3a97af5112b6cd9ece698ce74f5 src/mistralai/gcp/client/utils/url.py: id: 0d311bbcb8f8 last_write_checksum: sha1:6479961baa90432ca25626f8e40a7bbc32e73b41 diff --git a/packages/gcp/.speakeasy/gen.yaml b/packages/gcp/.speakeasy/gen.yaml index 93cc5a42..54336636 100644 --- a/packages/gcp/.speakeasy/gen.yaml +++ b/packages/gcp/.speakeasy/gen.yaml @@ -13,8 +13,9 @@ generation: requestResponseComponentNamesFeb2024: true securityFeb2025: true sharedErrorComponentsApr2025: true - methodSignaturesApr2024: true sharedNestedComponentsJan2026: true + nameOverrideFeb2026: true + methodSignaturesApr2024: true auth: oAuth2ClientCredentialsEnabled: true 
oAuth2PasswordEnabled: false @@ -22,31 +23,37 @@ generation: schemas: allOfMergeStrategy: shallowMerge requestBodyFieldName: "" + versioningStrategy: automatic persistentEdits: {} tests: generateTests: true generateNewTests: false skipResponseBodyAssertions: false python: - version: 2.0.0a4 + version: 2.0.0-a4.1 additionalDependencies: dev: pytest: ^8.2.2 pytest-asyncio: ^0.23.7 + main: {} allowedRedefinedBuiltins: - id - object + - input + - dir asyncMode: both authors: - Mistral baseErrorName: MistralGCPError clientServerStatusCodesAsErrors: true - constFieldCasing: upper + constFieldCasing: normal defaultErrorName: SDKError description: Python Client SDK for the Mistral AI API in GCP. enableCustomCodeRegions: false enumFormat: union fixFlags: + asyncPaginationSep2025: true + conflictResistantModelImportsFeb2026: true responseRequiredSep2024: true flatAdditionalProperties: true flattenGlobalSecurity: true @@ -58,17 +65,17 @@ python: option: openapi paths: callbacks: "" - errors: "" + errors: errors operations: "" shared: "" webhooks: "" inferUnionDiscriminators: true inputModelSuffix: input license: "" - maxMethodParams: 15 + maxMethodParams: 999 methodArguments: infer-optional-args moduleName: mistralai.gcp.client - multipartArrayFormat: legacy + multipartArrayFormat: standard outputModelSuffix: output packageManager: uv packageName: mistralai-gcp @@ -78,3 +85,4 @@ python: responseFormat: flat sseFlatResponse: false templateVersion: v2 + useAsyncHooks: false diff --git a/packages/gcp/docs/models/httpvalidationerror.md b/packages/gcp/docs/errors/httpvalidationerror.md similarity index 100% rename from packages/gcp/docs/models/httpvalidationerror.md rename to packages/gcp/docs/errors/httpvalidationerror.md diff --git a/packages/gcp/docs/models/chatcompletionrequest.md b/packages/gcp/docs/models/chatcompletionrequest.md index 61a25d86..8dbd4a82 100644 --- a/packages/gcp/docs/models/chatcompletionrequest.md +++ b/packages/gcp/docs/models/chatcompletionrequest.md @@ 
-14,7 +14,7 @@ | `random_seed` | *OptionalNullable[int]* | :heavy_minus_sign: | The seed to use for random sampling. If set, different calls will generate deterministic results. | | | `metadata` | Dict[str, *Any*] | :heavy_minus_sign: | N/A | | | `messages` | List[[models.ChatCompletionRequestMessage](../models/chatcompletionrequestmessage.md)] | :heavy_check_mark: | The prompt(s) to generate completions for, encoded as a list of dict with role and content. | [
{
"role": "user",
"content": "Who is the best French painter? Answer in one short sentence."
}
] | -| `response_format` | [Optional[models.ResponseFormat]](../models/responseformat.md) | :heavy_minus_sign: | Specify the format that the model must output. By default it will use `{ "type": "text" }`. Setting to `{ "type": "json_object" }` enables JSON mode, which guarantees the message the model generates is in JSON. When using JSON mode you MUST also instruct the model to produce JSON yourself with a system or a user message. Setting to `{ "type": "json_schema" }` enables JSON schema mode, which guarantees the message the model generates is in JSON and follows the schema you provide. | {
"type": "text"
} | +| `response_format` | [Optional[models.ResponseFormat]](../models/responseformat.md) | :heavy_minus_sign: | Specify the format that the model must output. By default it will use `{ "type": "text" }`. Setting to `{ "type": "json_object" }` enables JSON mode, which guarantees the message the model generates is in JSON. When using JSON mode you MUST also instruct the model to produce JSON yourself with a system or a user message. Setting to `{ "type": "json_schema" }` enables JSON schema mode, which guarantees the message the model generates is in JSON and follows the schema you provide. | **Example 1:** {
"type": "text"
}
**Example 2:** {
"type": "json_object"
}
**Example 3:** {
"type": "json_schema",
"json_schema": {
"schema": {
"properties": {
"name": {
"title": "Name",
"type": "string"
},
"authors": {
"items": {
"type": "string"
},
"title": "Authors",
"type": "array"
}
},
"required": [
"name",
"authors"
],
"title": "Book",
"type": "object",
"additionalProperties": false
},
"name": "book",
"strict": true
}
} | | `tools` | List[[models.Tool](../models/tool.md)] | :heavy_minus_sign: | A list of tools the model may call. Use this to provide a list of functions the model may generate JSON inputs for. | | | `tool_choice` | [Optional[models.ChatCompletionRequestToolChoice]](../models/chatcompletionrequesttoolchoice.md) | :heavy_minus_sign: | Controls which (if any) tool is called by the model. `none` means the model will not call any tool and instead generates a message. `auto` means the model can pick between generating a message or calling one or more tools. `any` or `required` means the model must call one or more tools. Specifying a particular tool via `{"type": "function", "function": {"name": "my_function"}}` forces the model to call that tool. | | | `presence_penalty` | *Optional[float]* | :heavy_minus_sign: | The `presence_penalty` determines how much the model penalizes the repetition of words or phrases. A higher presence penalty encourages the model to use a wider variety of words and phrases, making the output more diverse and creative. | | diff --git a/packages/gcp/docs/models/chatcompletionstreamrequest.md b/packages/gcp/docs/models/chatcompletionstreamrequest.md index 3e790e7d..db76b6c8 100644 --- a/packages/gcp/docs/models/chatcompletionstreamrequest.md +++ b/packages/gcp/docs/models/chatcompletionstreamrequest.md @@ -14,7 +14,7 @@ | `random_seed` | *OptionalNullable[int]* | :heavy_minus_sign: | The seed to use for random sampling. If set, different calls will generate deterministic results. | | | `metadata` | Dict[str, *Any*] | :heavy_minus_sign: | N/A | | | `messages` | List[[models.ChatCompletionStreamRequestMessage](../models/chatcompletionstreamrequestmessage.md)] | :heavy_check_mark: | The prompt(s) to generate completions for, encoded as a list of dict with role and content. | [
{
"role": "user",
"content": "Who is the best French painter? Answer in one short sentence."
}
] | -| `response_format` | [Optional[models.ResponseFormat]](../models/responseformat.md) | :heavy_minus_sign: | Specify the format that the model must output. By default it will use `{ "type": "text" }`. Setting to `{ "type": "json_object" }` enables JSON mode, which guarantees the message the model generates is in JSON. When using JSON mode you MUST also instruct the model to produce JSON yourself with a system or a user message. Setting to `{ "type": "json_schema" }` enables JSON schema mode, which guarantees the message the model generates is in JSON and follows the schema you provide. | {
"type": "text"
} | +| `response_format` | [Optional[models.ResponseFormat]](../models/responseformat.md) | :heavy_minus_sign: | Specify the format that the model must output. By default it will use `{ "type": "text" }`. Setting to `{ "type": "json_object" }` enables JSON mode, which guarantees the message the model generates is in JSON. When using JSON mode you MUST also instruct the model to produce JSON yourself with a system or a user message. Setting to `{ "type": "json_schema" }` enables JSON schema mode, which guarantees the message the model generates is in JSON and follows the schema you provide. | **Example 1:** {
"type": "text"
}
**Example 2:** {
"type": "json_object"
}
**Example 3:** {
"type": "json_schema",
"json_schema": {
"schema": {
"properties": {
"name": {
"title": "Name",
"type": "string"
},
"authors": {
"items": {
"type": "string"
},
"title": "Authors",
"type": "array"
}
},
"required": [
"name",
"authors"
],
"title": "Book",
"type": "object",
"additionalProperties": false
},
"name": "book",
"strict": true
}
} | | `tools` | List[[models.Tool](../models/tool.md)] | :heavy_minus_sign: | A list of tools the model may call. Use this to provide a list of functions the model may generate JSON inputs for. | | | `tool_choice` | [Optional[models.ChatCompletionStreamRequestToolChoice]](../models/chatcompletionstreamrequesttoolchoice.md) | :heavy_minus_sign: | Controls which (if any) tool is called by the model. `none` means the model will not call any tool and instead generates a message. `auto` means the model can pick between generating a message or calling one or more tools. `any` or `required` means the model must call one or more tools. Specifying a particular tool via `{"type": "function", "function": {"name": "my_function"}}` forces the model to call that tool. | | | `presence_penalty` | *Optional[float]* | :heavy_minus_sign: | The `presence_penalty` determines how much the model penalizes the repetition of words or phrases. A higher presence penalty encourages the model to use a wider variety of words and phrases, making the output more diverse and creative. | | diff --git a/packages/gcp/pylintrc b/packages/gcp/pylintrc index a8fcb932..0391ac11 100644 --- a/packages/gcp/pylintrc +++ b/packages/gcp/pylintrc @@ -89,7 +89,7 @@ persistent=yes # Minimum Python version to use for version dependent checks. Will default to # the version used to run pylint. -py-version=3.9 +py-version=3.10 # Discover python modules and packages in the file system subtree. recursive=no @@ -459,7 +459,8 @@ disable=raw-checker-failed, consider-using-with, wildcard-import, unused-wildcard-import, - too-many-return-statements + too-many-return-statements, + redefined-builtin # Enable the message, report, category or checker with the given id(s). 
You can # either give multiple identifier separated by comma (,) or put this option @@ -641,7 +642,7 @@ additional-builtins= allow-global-unused-variables=yes # List of names allowed to shadow builtins -allowed-redefined-builtins=id,object +allowed-redefined-builtins=id,object,input,dir # List of strings which can identify a callback function by name. A callback # name must start or end with one of those strings. diff --git a/packages/gcp/src/mistralai/gcp/client/__init__.py b/packages/gcp/src/mistralai/gcp/client/__init__.py index dd02e42e..833c68cd 100644 --- a/packages/gcp/src/mistralai/gcp/client/__init__.py +++ b/packages/gcp/src/mistralai/gcp/client/__init__.py @@ -9,7 +9,6 @@ ) from .sdk import * from .sdkconfiguration import * -from .models import * VERSION: str = __version__ diff --git a/packages/gcp/src/mistralai/gcp/client/_version.py b/packages/gcp/src/mistralai/gcp/client/_version.py index ba48dac1..204c92a6 100644 --- a/packages/gcp/src/mistralai/gcp/client/_version.py +++ b/packages/gcp/src/mistralai/gcp/client/_version.py @@ -3,10 +3,10 @@ import importlib.metadata __title__: str = "mistralai-gcp" -__version__: str = "2.0.0a4" +__version__: str = "2.0.0-a4.1" __openapi_doc_version__: str = "1.0.0" -__gen_version__: str = "2.794.1" -__user_agent__: str = "speakeasy-sdk/python 2.0.0a4 2.794.1 1.0.0 mistralai-gcp" +__gen_version__: str = "2.841.0" +__user_agent__: str = "speakeasy-sdk/python 2.0.0-a4.1 2.841.0 1.0.0 mistralai-gcp" try: if __package__ is not None: diff --git a/packages/gcp/src/mistralai/gcp/client/basesdk.py b/packages/gcp/src/mistralai/gcp/client/basesdk.py index 6f9f5fd9..b3edcb0a 100644 --- a/packages/gcp/src/mistralai/gcp/client/basesdk.py +++ b/packages/gcp/src/mistralai/gcp/client/basesdk.py @@ -2,7 +2,7 @@ from .sdkconfiguration import SDKConfiguration import httpx -from mistralai.gcp.client import models, utils +from mistralai.gcp.client import errors, utils from mistralai.gcp.client._hooks import ( AfterErrorContext, 
AfterSuccessContext, @@ -12,6 +12,7 @@ RetryConfig, SerializedRequestBody, get_body_content, + run_sync_in_thread, ) from typing import Callable, List, Mapping, Optional, Tuple from urllib.parse import parse_qs, urlparse @@ -264,7 +265,7 @@ def do(): if http_res is None: logger.debug("Raising no response SDK error") - raise models.NoResponseError("No response received") + raise errors.NoResponseError("No response received") logger.debug( "Response:\nStatus Code: %s\nURL: %s\nHeaders: %s\nBody: %s", @@ -285,7 +286,7 @@ def do(): http_res = result else: logger.debug("Raising unexpected SDK error") - raise models.SDKError("Unexpected error occurred", http_res) + raise errors.SDKError("Unexpected error occurred", http_res) return http_res @@ -315,7 +316,10 @@ async def do_request_async( async def do(): http_res = None try: - req = hooks.before_request(BeforeRequestContext(hook_ctx), request) + req = await run_sync_in_thread( + hooks.before_request, BeforeRequestContext(hook_ctx), request + ) + logger.debug( "Request:\nMethod: %s\nURL: %s\nHeaders: %s\nBody: %s", req.method, @@ -329,14 +333,17 @@ async def do(): http_res = await client.send(req, stream=stream) except Exception as e: - _, e = hooks.after_error(AfterErrorContext(hook_ctx), None, e) + _, e = await run_sync_in_thread( + hooks.after_error, AfterErrorContext(hook_ctx), None, e + ) + if e is not None: logger.debug("Request Exception", exc_info=True) raise e if http_res is None: logger.debug("Raising no response SDK error") - raise models.NoResponseError("No response received") + raise errors.NoResponseError("No response received") logger.debug( "Response:\nStatus Code: %s\nURL: %s\nHeaders: %s\nBody: %s", @@ -347,9 +354,10 @@ async def do(): ) if utils.match_status_codes(error_status_codes, http_res.status_code): - result, err = hooks.after_error( - AfterErrorContext(hook_ctx), http_res, None + result, err = await run_sync_in_thread( + hooks.after_error, AfterErrorContext(hook_ctx), http_res, None ) + if err 
is not None: logger.debug("Request Exception", exc_info=True) raise err @@ -357,7 +365,7 @@ async def do(): http_res = result else: logger.debug("Raising unexpected SDK error") - raise models.SDKError("Unexpected error occurred", http_res) + raise errors.SDKError("Unexpected error occurred", http_res) return http_res @@ -369,6 +377,8 @@ async def do(): http_res = await do() if not utils.match_status_codes(error_status_codes, http_res.status_code): - http_res = hooks.after_success(AfterSuccessContext(hook_ctx), http_res) + http_res = await run_sync_in_thread( + hooks.after_success, AfterSuccessContext(hook_ctx), http_res + ) return http_res diff --git a/packages/gcp/src/mistralai/gcp/client/chat.py b/packages/gcp/src/mistralai/gcp/client/chat.py index 78541248..925d69ed 100644 --- a/packages/gcp/src/mistralai/gcp/client/chat.py +++ b/packages/gcp/src/mistralai/gcp/client/chat.py @@ -1,7 +1,7 @@ """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" from .basesdk import BaseSDK -from mistralai.gcp.client import models, utils +from mistralai.gcp.client import errors, models, utils from mistralai.gcp.client._hooks import HookContext from mistralai.gcp.client.types import OptionalNullable, UNSET from mistralai.gcp.client.utils import eventstreaming @@ -176,18 +176,18 @@ def stream( if utils.match_response(http_res, "422", "application/json"): http_res_text = utils.stream_to_text(http_res) response_data = unmarshal_json_response( - models.HTTPValidationErrorData, http_res, http_res_text + errors.HTTPValidationErrorData, http_res, http_res_text ) - raise models.HTTPValidationError(response_data, http_res, http_res_text) + raise errors.HTTPValidationError(response_data, http_res, http_res_text) if utils.match_response(http_res, "4XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if 
utils.match_response(http_res, "5XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("Unexpected response received", http_res, http_res_text) + raise errors.SDKError("Unexpected response received", http_res, http_res_text) async def stream_async( self, @@ -353,18 +353,18 @@ async def stream_async( if utils.match_response(http_res, "422", "application/json"): http_res_text = await utils.stream_to_text_async(http_res) response_data = unmarshal_json_response( - models.HTTPValidationErrorData, http_res, http_res_text + errors.HTTPValidationErrorData, http_res, http_res_text ) - raise models.HTTPValidationError(response_data, http_res, http_res_text) + raise errors.HTTPValidationError(response_data, http_res, http_res_text) if utils.match_response(http_res, "4XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("Unexpected response received", http_res, http_res_text) + raise errors.SDKError("Unexpected response received", http_res, http_res_text) def complete( self, @@ -521,17 +521,17 @@ def complete( return unmarshal_json_response(models.ChatCompletionResponse, http_res) if utils.match_response(http_res, "422", "application/json"): response_data = unmarshal_json_response( - models.HTTPValidationErrorData, http_res + errors.HTTPValidationErrorData, http_res ) - raise 
models.HTTPValidationError(response_data, http_res) + raise errors.HTTPValidationError(response_data, http_res) if utils.match_response(http_res, "4XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise models.SDKError("Unexpected response received", http_res) + raise errors.SDKError("Unexpected response received", http_res) async def complete_async( self, @@ -688,14 +688,14 @@ async def complete_async( return unmarshal_json_response(models.ChatCompletionResponse, http_res) if utils.match_response(http_res, "422", "application/json"): response_data = unmarshal_json_response( - models.HTTPValidationErrorData, http_res + errors.HTTPValidationErrorData, http_res ) - raise models.HTTPValidationError(response_data, http_res) + raise errors.HTTPValidationError(response_data, http_res) if utils.match_response(http_res, "4XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise models.SDKError("Unexpected response received", http_res) + raise errors.SDKError("Unexpected response received", http_res) diff --git a/packages/gcp/src/mistralai/gcp/client/errors/__init__.py b/packages/gcp/src/mistralai/gcp/client/errors/__init__.py new file mode 100644 index 00000000..00c8ee00 --- /dev/null +++ 
b/packages/gcp/src/mistralai/gcp/client/errors/__init__.py @@ -0,0 +1,39 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from .mistralgcperror import MistralGCPError +from typing import Any, TYPE_CHECKING + +from mistralai.gcp.client.utils.dynamic_imports import lazy_getattr, lazy_dir + +if TYPE_CHECKING: + from .httpvalidationerror import HTTPValidationError, HTTPValidationErrorData + from .no_response_error import NoResponseError + from .responsevalidationerror import ResponseValidationError + from .sdkerror import SDKError + +__all__ = [ + "HTTPValidationError", + "HTTPValidationErrorData", + "MistralGCPError", + "NoResponseError", + "ResponseValidationError", + "SDKError", +] + +_dynamic_imports: dict[str, str] = { + "HTTPValidationError": ".httpvalidationerror", + "HTTPValidationErrorData": ".httpvalidationerror", + "NoResponseError": ".no_response_error", + "ResponseValidationError": ".responsevalidationerror", + "SDKError": ".sdkerror", +} + + +def __getattr__(attr_name: str) -> Any: + return lazy_getattr( + attr_name, package=__package__, dynamic_imports=_dynamic_imports + ) + + +def __dir__(): + return lazy_dir(dynamic_imports=_dynamic_imports) diff --git a/packages/gcp/src/mistralai/gcp/client/models/httpvalidationerror.py b/packages/gcp/src/mistralai/gcp/client/errors/httpvalidationerror.py similarity index 77% rename from packages/gcp/src/mistralai/gcp/client/models/httpvalidationerror.py rename to packages/gcp/src/mistralai/gcp/client/errors/httpvalidationerror.py index 57df7260..59806819 100644 --- a/packages/gcp/src/mistralai/gcp/client/models/httpvalidationerror.py +++ b/packages/gcp/src/mistralai/gcp/client/errors/httpvalidationerror.py @@ -1,16 +1,16 @@ """Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" from __future__ import annotations -from .validationerror import ValidationError from dataclasses import dataclass, field import httpx -from mistralai.gcp.client.models import MistralGCPError +from mistralai.gcp.client.errors import MistralGCPError +from mistralai.gcp.client.models import validationerror as models_validationerror from mistralai.gcp.client.types import BaseModel from typing import List, Optional class HTTPValidationErrorData(BaseModel): - detail: Optional[List[ValidationError]] = None + detail: Optional[List[models_validationerror.ValidationError]] = None @dataclass(unsafe_hash=True) diff --git a/packages/gcp/src/mistralai/gcp/client/models/mistralgcperror.py b/packages/gcp/src/mistralai/gcp/client/errors/mistralgcperror.py similarity index 100% rename from packages/gcp/src/mistralai/gcp/client/models/mistralgcperror.py rename to packages/gcp/src/mistralai/gcp/client/errors/mistralgcperror.py diff --git a/packages/gcp/src/mistralai/gcp/client/models/no_response_error.py b/packages/gcp/src/mistralai/gcp/client/errors/no_response_error.py similarity index 100% rename from packages/gcp/src/mistralai/gcp/client/models/no_response_error.py rename to packages/gcp/src/mistralai/gcp/client/errors/no_response_error.py diff --git a/packages/gcp/src/mistralai/gcp/client/models/responsevalidationerror.py b/packages/gcp/src/mistralai/gcp/client/errors/responsevalidationerror.py similarity index 92% rename from packages/gcp/src/mistralai/gcp/client/models/responsevalidationerror.py rename to packages/gcp/src/mistralai/gcp/client/errors/responsevalidationerror.py index 0e86ea6c..e8bd83c1 100644 --- a/packages/gcp/src/mistralai/gcp/client/models/responsevalidationerror.py +++ b/packages/gcp/src/mistralai/gcp/client/errors/responsevalidationerror.py @@ -4,7 +4,7 @@ from typing import Optional from dataclasses import dataclass -from mistralai.gcp.client.models import MistralGCPError +from mistralai.gcp.client.errors import MistralGCPError 
@dataclass(unsafe_hash=True) diff --git a/packages/gcp/src/mistralai/gcp/client/models/sdkerror.py b/packages/gcp/src/mistralai/gcp/client/errors/sdkerror.py similarity index 95% rename from packages/gcp/src/mistralai/gcp/client/models/sdkerror.py rename to packages/gcp/src/mistralai/gcp/client/errors/sdkerror.py index 00bc1d99..69809246 100644 --- a/packages/gcp/src/mistralai/gcp/client/models/sdkerror.py +++ b/packages/gcp/src/mistralai/gcp/client/errors/sdkerror.py @@ -4,7 +4,7 @@ from typing import Optional from dataclasses import dataclass -from mistralai.gcp.client.models import MistralGCPError +from mistralai.gcp.client.errors import MistralGCPError MAX_MESSAGE_LEN = 10_000 diff --git a/packages/gcp/src/mistralai/gcp/client/fim.py b/packages/gcp/src/mistralai/gcp/client/fim.py index e2acacd5..4202102a 100644 --- a/packages/gcp/src/mistralai/gcp/client/fim.py +++ b/packages/gcp/src/mistralai/gcp/client/fim.py @@ -1,7 +1,7 @@ """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" from .basesdk import BaseSDK -from mistralai.gcp.client import models, utils +from mistralai.gcp.client import errors, models, utils from mistralai.gcp.client._hooks import HookContext from mistralai.gcp.client.types import OptionalNullable, UNSET from mistralai.gcp.client.utils import eventstreaming @@ -133,18 +133,18 @@ def stream( if utils.match_response(http_res, "422", "application/json"): http_res_text = utils.stream_to_text(http_res) response_data = unmarshal_json_response( - models.HTTPValidationErrorData, http_res, http_res_text + errors.HTTPValidationErrorData, http_res, http_res_text ) - raise models.HTTPValidationError(response_data, http_res, http_res_text) + raise errors.HTTPValidationError(response_data, http_res, http_res_text) if utils.match_response(http_res, "4XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, 
http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("Unexpected response received", http_res, http_res_text) + raise errors.SDKError("Unexpected response received", http_res, http_res_text) async def stream_async( self, @@ -267,18 +267,18 @@ async def stream_async( if utils.match_response(http_res, "422", "application/json"): http_res_text = await utils.stream_to_text_async(http_res) response_data = unmarshal_json_response( - models.HTTPValidationErrorData, http_res, http_res_text + errors.HTTPValidationErrorData, http_res, http_res_text ) - raise models.HTTPValidationError(response_data, http_res, http_res_text) + raise errors.HTTPValidationError(response_data, http_res, http_res_text) if utils.match_response(http_res, "4XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("Unexpected response received", http_res, http_res_text) + raise errors.SDKError("Unexpected response received", http_res, http_res_text) def complete( self, @@ -394,17 +394,17 @@ def complete( return unmarshal_json_response(models.FIMCompletionResponse, http_res) if utils.match_response(http_res, "422", "application/json"): response_data = unmarshal_json_response( - models.HTTPValidationErrorData, http_res + errors.HTTPValidationErrorData, http_res 
) - raise models.HTTPValidationError(response_data, http_res) + raise errors.HTTPValidationError(response_data, http_res) if utils.match_response(http_res, "4XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise models.SDKError("Unexpected response received", http_res) + raise errors.SDKError("Unexpected response received", http_res) async def complete_async( self, @@ -520,14 +520,14 @@ async def complete_async( return unmarshal_json_response(models.FIMCompletionResponse, http_res) if utils.match_response(http_res, "422", "application/json"): response_data = unmarshal_json_response( - models.HTTPValidationErrorData, http_res + errors.HTTPValidationErrorData, http_res ) - raise models.HTTPValidationError(response_data, http_res) + raise errors.HTTPValidationError(response_data, http_res) if utils.match_response(http_res, "4XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise models.SDKError("Unexpected response received", http_res) + raise errors.SDKError("Unexpected response received", http_res) diff --git a/packages/gcp/src/mistralai/gcp/client/models/__init__.py b/packages/gcp/src/mistralai/gcp/client/models/__init__.py index fb446c25..575f6404 100644 --- 
a/packages/gcp/src/mistralai/gcp/client/models/__init__.py +++ b/packages/gcp/src/mistralai/gcp/client/models/__init__.py @@ -1,10 +1,8 @@ """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" -from .mistralgcperror import MistralGCPError -from typing import TYPE_CHECKING -from importlib import import_module -import builtins -import sys +from typing import Any, TYPE_CHECKING + +from mistralai.gcp.client.utils.dynamic_imports import lazy_getattr, lazy_dir if TYPE_CHECKING: from .assistantmessage import ( @@ -49,7 +47,7 @@ CompletionResponseStreamChoiceFinishReason, CompletionResponseStreamChoiceTypedDict, ) - from .contentchunk import ContentChunk, ContentChunkTypedDict + from .contentchunk import ContentChunk, ContentChunkTypedDict, UnknownContentChunk from .deltamessage import ( DeltaMessage, DeltaMessageContent, @@ -80,7 +78,6 @@ FunctionCallTypedDict, ) from .functionname import FunctionName, FunctionNameTypedDict - from .httpvalidationerror import HTTPValidationError, HTTPValidationErrorData from .imagedetail import ImageDetail from .imageurl import ImageURL, ImageURLTypedDict from .imageurlchunk import ( @@ -91,13 +88,10 @@ ) from .jsonschema import JSONSchema, JSONSchemaTypedDict from .mistralpromptmode import MistralPromptMode - from .no_response_error import NoResponseError from .prediction import Prediction, PredictionTypedDict from .referencechunk import ReferenceChunk, ReferenceChunkTypedDict from .responseformat import ResponseFormat, ResponseFormatTypedDict from .responseformats import ResponseFormats - from .responsevalidationerror import ResponseValidationError - from .sdkerror import SDKError from .security import Security, SecurityTypedDict from .systemmessage import ( SystemMessage, @@ -193,8 +187,6 @@ "FunctionName", "FunctionNameTypedDict", "FunctionTypedDict", - "HTTPValidationError", - "HTTPValidationErrorData", "ImageDetail", "ImageURL", "ImageURLChunk", @@ -206,9 +198,7 @@ "JSONSchemaTypedDict", "Loc", "LocTypedDict", - 
"MistralGCPError", "MistralPromptMode", - "NoResponseError", "Prediction", "PredictionTypedDict", "ReferenceChunk", @@ -216,8 +206,6 @@ "ResponseFormat", "ResponseFormatTypedDict", "ResponseFormats", - "ResponseValidationError", - "SDKError", "Security", "SecurityTypedDict", "SystemMessage", @@ -244,6 +232,7 @@ "ToolMessageTypedDict", "ToolTypedDict", "ToolTypes", + "UnknownContentChunk", "UsageInfo", "UsageInfoTypedDict", "UserMessage", @@ -289,6 +278,7 @@ "CompletionResponseStreamChoiceTypedDict": ".completionresponsestreamchoice", "ContentChunk": ".contentchunk", "ContentChunkTypedDict": ".contentchunk", + "UnknownContentChunk": ".contentchunk", "DeltaMessage": ".deltamessage", "DeltaMessageContent": ".deltamessage", "DeltaMessageContentTypedDict": ".deltamessage", @@ -311,8 +301,6 @@ "FunctionCallTypedDict": ".functioncall", "FunctionName": ".functionname", "FunctionNameTypedDict": ".functionname", - "HTTPValidationError": ".httpvalidationerror", - "HTTPValidationErrorData": ".httpvalidationerror", "ImageDetail": ".imagedetail", "ImageURL": ".imageurl", "ImageURLTypedDict": ".imageurl", @@ -323,7 +311,6 @@ "JSONSchema": ".jsonschema", "JSONSchemaTypedDict": ".jsonschema", "MistralPromptMode": ".mistralpromptmode", - "NoResponseError": ".no_response_error", "Prediction": ".prediction", "PredictionTypedDict": ".prediction", "ReferenceChunk": ".referencechunk", @@ -331,8 +318,6 @@ "ResponseFormat": ".responseformat", "ResponseFormatTypedDict": ".responseformat", "ResponseFormats": ".responseformats", - "ResponseValidationError": ".responsevalidationerror", - "SDKError": ".sdkerror", "Security": ".security", "SecurityTypedDict": ".security", "SystemMessage": ".systemmessage", @@ -372,39 +357,11 @@ } -def dynamic_import(modname, retries=3): - for attempt in range(retries): - try: - return import_module(modname, __package__) - except KeyError: - # Clear any half-initialized module and retry - sys.modules.pop(modname, None) - if attempt == retries - 1: - break - raise 
KeyError(f"Failed to import module '{modname}' after {retries} attempts") - - -def __getattr__(attr_name: str) -> object: - module_name = _dynamic_imports.get(attr_name) - if module_name is None: - raise AttributeError( - f"No {attr_name} found in _dynamic_imports for module name -> {__name__} " - ) - - try: - module = dynamic_import(module_name) - result = getattr(module, attr_name) - return result - except ImportError as e: - raise ImportError( - f"Failed to import {attr_name} from {module_name}: {e}" - ) from e - except AttributeError as e: - raise AttributeError( - f"Failed to get {attr_name} from {module_name}: {e}" - ) from e +def __getattr__(attr_name: str) -> Any: + return lazy_getattr( + attr_name, package=__package__, dynamic_imports=_dynamic_imports + ) def __dir__(): - lazy_attrs = builtins.list(_dynamic_imports.keys()) - return builtins.sorted(lazy_attrs) + return lazy_dir(dynamic_imports=_dynamic_imports) diff --git a/packages/gcp/src/mistralai/gcp/client/models/assistantmessage.py b/packages/gcp/src/mistralai/gcp/client/models/assistantmessage.py index 7061775b..702ac470 100644 --- a/packages/gcp/src/mistralai/gcp/client/models/assistantmessage.py +++ b/packages/gcp/src/mistralai/gcp/client/models/assistantmessage.py @@ -37,7 +37,7 @@ class AssistantMessageTypedDict(TypedDict): class AssistantMessage(BaseModel): - ROLE: Annotated[ + role: Annotated[ Annotated[ Optional[Literal["assistant"]], AfterValidator(validate_const("assistant")) ], @@ -53,30 +53,31 @@ class AssistantMessage(BaseModel): @model_serializer(mode="wrap") def serialize_model(self, handler): - optional_fields = ["role", "content", "tool_calls", "prefix"] - nullable_fields = ["content", "tool_calls"] - null_default_fields = [] - + optional_fields = set(["role", "content", "tool_calls", "prefix"]) + nullable_fields = set(["content", "tool_calls"]) serialized = handler(self) - m = {} for n, f in type(self).model_fields.items(): k = f.alias or n val = serialized.get(k) - serialized.pop(k, 
None) + is_nullable_and_explicitly_set = ( + k in nullable_fields + and (self.__pydantic_fields_set__.intersection({n})) # pylint: disable=no-member + ) + + if val != UNSET_SENTINEL: + if ( + val is not None + or k not in optional_fields + or is_nullable_and_explicitly_set + ): + m[k] = val - optional_nullable = k in optional_fields and k in nullable_fields - is_set = ( - self.__pydantic_fields_set__.intersection({n}) - or k in null_default_fields - ) # pylint: disable=no-member + return m - if val is not None and val != UNSET_SENTINEL: - m[k] = val - elif val != UNSET_SENTINEL and ( - not k in optional_fields or (optional_nullable and is_set) - ): - m[k] = val - return m +try: + AssistantMessage.model_rebuild() +except NameError: + pass diff --git a/packages/gcp/src/mistralai/gcp/client/models/chatcompletionrequest.py b/packages/gcp/src/mistralai/gcp/client/models/chatcompletionrequest.py index 1bc03922..8229c5bb 100644 --- a/packages/gcp/src/mistralai/gcp/client/models/chatcompletionrequest.py +++ b/packages/gcp/src/mistralai/gcp/client/models/chatcompletionrequest.py @@ -165,55 +165,54 @@ class ChatCompletionRequest(BaseModel): @model_serializer(mode="wrap") def serialize_model(self, handler): - optional_fields = [ - "temperature", - "top_p", - "max_tokens", - "stream", - "stop", - "random_seed", - "metadata", - "response_format", - "tools", - "tool_choice", - "presence_penalty", - "frequency_penalty", - "n", - "prediction", - "parallel_tool_calls", - "prompt_mode", - ] - nullable_fields = [ - "temperature", - "max_tokens", - "random_seed", - "metadata", - "tools", - "n", - "prompt_mode", - ] - null_default_fields = [] - + optional_fields = set( + [ + "temperature", + "top_p", + "max_tokens", + "stream", + "stop", + "random_seed", + "metadata", + "response_format", + "tools", + "tool_choice", + "presence_penalty", + "frequency_penalty", + "n", + "prediction", + "parallel_tool_calls", + "prompt_mode", + ] + ) + nullable_fields = set( + [ + "temperature", + 
"max_tokens", + "random_seed", + "metadata", + "tools", + "n", + "prompt_mode", + ] + ) serialized = handler(self) - m = {} for n, f in type(self).model_fields.items(): k = f.alias or n val = serialized.get(k) - serialized.pop(k, None) - - optional_nullable = k in optional_fields and k in nullable_fields - is_set = ( - self.__pydantic_fields_set__.intersection({n}) - or k in null_default_fields - ) # pylint: disable=no-member - - if val is not None and val != UNSET_SENTINEL: - m[k] = val - elif val != UNSET_SENTINEL and ( - not k in optional_fields or (optional_nullable and is_set) - ): - m[k] = val + is_nullable_and_explicitly_set = ( + k in nullable_fields + and (self.__pydantic_fields_set__.intersection({n})) # pylint: disable=no-member + ) + + if val != UNSET_SENTINEL: + if ( + val is not None + or k not in optional_fields + or is_nullable_and_explicitly_set + ): + m[k] = val return m diff --git a/packages/gcp/src/mistralai/gcp/client/models/chatcompletionstreamrequest.py b/packages/gcp/src/mistralai/gcp/client/models/chatcompletionstreamrequest.py index 0a5a0021..3c228d2e 100644 --- a/packages/gcp/src/mistralai/gcp/client/models/chatcompletionstreamrequest.py +++ b/packages/gcp/src/mistralai/gcp/client/models/chatcompletionstreamrequest.py @@ -163,55 +163,54 @@ class ChatCompletionStreamRequest(BaseModel): @model_serializer(mode="wrap") def serialize_model(self, handler): - optional_fields = [ - "temperature", - "top_p", - "max_tokens", - "stream", - "stop", - "random_seed", - "metadata", - "response_format", - "tools", - "tool_choice", - "presence_penalty", - "frequency_penalty", - "n", - "prediction", - "parallel_tool_calls", - "prompt_mode", - ] - nullable_fields = [ - "temperature", - "max_tokens", - "random_seed", - "metadata", - "tools", - "n", - "prompt_mode", - ] - null_default_fields = [] - + optional_fields = set( + [ + "temperature", + "top_p", + "max_tokens", + "stream", + "stop", + "random_seed", + "metadata", + "response_format", + "tools", + 
"tool_choice", + "presence_penalty", + "frequency_penalty", + "n", + "prediction", + "parallel_tool_calls", + "prompt_mode", + ] + ) + nullable_fields = set( + [ + "temperature", + "max_tokens", + "random_seed", + "metadata", + "tools", + "n", + "prompt_mode", + ] + ) serialized = handler(self) - m = {} for n, f in type(self).model_fields.items(): k = f.alias or n val = serialized.get(k) - serialized.pop(k, None) - - optional_nullable = k in optional_fields and k in nullable_fields - is_set = ( - self.__pydantic_fields_set__.intersection({n}) - or k in null_default_fields - ) # pylint: disable=no-member - - if val is not None and val != UNSET_SENTINEL: - m[k] = val - elif val != UNSET_SENTINEL and ( - not k in optional_fields or (optional_nullable and is_set) - ): - m[k] = val + is_nullable_and_explicitly_set = ( + k in nullable_fields + and (self.__pydantic_fields_set__.intersection({n})) # pylint: disable=no-member + ) + + if val != UNSET_SENTINEL: + if ( + val is not None + or k not in optional_fields + or is_nullable_and_explicitly_set + ): + m[k] = val return m diff --git a/packages/gcp/src/mistralai/gcp/client/models/completionchunk.py b/packages/gcp/src/mistralai/gcp/client/models/completionchunk.py index 9e54cb6d..a0b1ae2f 100644 --- a/packages/gcp/src/mistralai/gcp/client/models/completionchunk.py +++ b/packages/gcp/src/mistralai/gcp/client/models/completionchunk.py @@ -6,7 +6,8 @@ CompletionResponseStreamChoiceTypedDict, ) from .usageinfo import UsageInfo, UsageInfoTypedDict -from mistralai.gcp.client.types import BaseModel +from mistralai.gcp.client.types import BaseModel, UNSET_SENTINEL +from pydantic import model_serializer from typing import List, Optional from typing_extensions import NotRequired, TypedDict @@ -32,3 +33,19 @@ class CompletionChunk(BaseModel): created: Optional[int] = None usage: Optional[UsageInfo] = None + + @model_serializer(mode="wrap") + def serialize_model(self, handler): + optional_fields = set(["object", "created", "usage"]) + 
serialized = handler(self) + m = {} + + for n, f in type(self).model_fields.items(): + k = f.alias or n + val = serialized.get(k) + + if val != UNSET_SENTINEL: + if val is not None or k not in optional_fields: + m[k] = val + + return m diff --git a/packages/gcp/src/mistralai/gcp/client/models/completionresponsestreamchoice.py b/packages/gcp/src/mistralai/gcp/client/models/completionresponsestreamchoice.py index 6f306721..e58d4c88 100644 --- a/packages/gcp/src/mistralai/gcp/client/models/completionresponsestreamchoice.py +++ b/packages/gcp/src/mistralai/gcp/client/models/completionresponsestreamchoice.py @@ -39,30 +39,14 @@ class CompletionResponseStreamChoice(BaseModel): @model_serializer(mode="wrap") def serialize_model(self, handler): - optional_fields = [] - nullable_fields = ["finish_reason"] - null_default_fields = [] - serialized = handler(self) - m = {} for n, f in type(self).model_fields.items(): k = f.alias or n val = serialized.get(k) - serialized.pop(k, None) - optional_nullable = k in optional_fields and k in nullable_fields - is_set = ( - self.__pydantic_fields_set__.intersection({n}) - or k in null_default_fields - ) # pylint: disable=no-member - - if val is not None and val != UNSET_SENTINEL: - m[k] = val - elif val != UNSET_SENTINEL and ( - not k in optional_fields or (optional_nullable and is_set) - ): + if val != UNSET_SENTINEL: m[k] = val return m diff --git a/packages/gcp/src/mistralai/gcp/client/models/contentchunk.py b/packages/gcp/src/mistralai/gcp/client/models/contentchunk.py index 1cd9e502..18d48150 100644 --- a/packages/gcp/src/mistralai/gcp/client/models/contentchunk.py +++ b/packages/gcp/src/mistralai/gcp/client/models/contentchunk.py @@ -4,9 +4,12 @@ from .imageurlchunk import ImageURLChunk, ImageURLChunkTypedDict from .referencechunk import ReferenceChunk, ReferenceChunkTypedDict from .textchunk import TextChunk, TextChunkTypedDict -from mistralai.gcp.client.utils import get_discriminator -from pydantic import Discriminator, Tag -from 
typing import Union +from functools import partial +from mistralai.gcp.client.types import BaseModel +from mistralai.gcp.client.utils.unions import parse_open_union +from pydantic import ConfigDict +from pydantic.functional_validators import BeforeValidator +from typing import Any, Literal, Union from typing_extensions import Annotated, TypeAliasType @@ -16,11 +19,32 @@ ) +class UnknownContentChunk(BaseModel): + r"""A ContentChunk variant the SDK doesn't recognize. Preserves the raw payload.""" + + type: Literal["UNKNOWN"] = "UNKNOWN" + raw: Any + is_unknown: Literal[True] = True + + model_config = ConfigDict(frozen=True) + + +_CONTENT_CHUNK_VARIANTS: dict[str, Any] = { + "image_url": ImageURLChunk, + "text": TextChunk, + "reference": ReferenceChunk, +} + + ContentChunk = Annotated[ - Union[ - Annotated[ImageURLChunk, Tag("image_url")], - Annotated[TextChunk, Tag("text")], - Annotated[ReferenceChunk, Tag("reference")], - ], - Discriminator(lambda m: get_discriminator(m, "type", "type")), + Union[ImageURLChunk, TextChunk, ReferenceChunk, UnknownContentChunk], + BeforeValidator( + partial( + parse_open_union, + disc_key="type", + variants=_CONTENT_CHUNK_VARIANTS, + unknown_cls=UnknownContentChunk, + union_name="ContentChunk", + ) + ), ] diff --git a/packages/gcp/src/mistralai/gcp/client/models/deltamessage.py b/packages/gcp/src/mistralai/gcp/client/models/deltamessage.py index 96923518..63e6a7f3 100644 --- a/packages/gcp/src/mistralai/gcp/client/models/deltamessage.py +++ b/packages/gcp/src/mistralai/gcp/client/models/deltamessage.py @@ -40,30 +40,25 @@ class DeltaMessage(BaseModel): @model_serializer(mode="wrap") def serialize_model(self, handler): - optional_fields = ["role", "content", "tool_calls"] - nullable_fields = ["role", "content", "tool_calls"] - null_default_fields = [] - + optional_fields = set(["role", "content", "tool_calls"]) + nullable_fields = set(["role", "content", "tool_calls"]) serialized = handler(self) - m = {} for n, f in 
type(self).model_fields.items(): k = f.alias or n val = serialized.get(k) - serialized.pop(k, None) - - optional_nullable = k in optional_fields and k in nullable_fields - is_set = ( - self.__pydantic_fields_set__.intersection({n}) - or k in null_default_fields - ) # pylint: disable=no-member - - if val is not None and val != UNSET_SENTINEL: - m[k] = val - elif val != UNSET_SENTINEL and ( - not k in optional_fields or (optional_nullable and is_set) - ): - m[k] = val + is_nullable_and_explicitly_set = ( + k in nullable_fields + and (self.__pydantic_fields_set__.intersection({n})) # pylint: disable=no-member + ) + + if val != UNSET_SENTINEL: + if ( + val is not None + or k not in optional_fields + or is_nullable_and_explicitly_set + ): + m[k] = val return m diff --git a/packages/gcp/src/mistralai/gcp/client/models/fimcompletionrequest.py b/packages/gcp/src/mistralai/gcp/client/models/fimcompletionrequest.py index f37bbcc3..e460f76c 100644 --- a/packages/gcp/src/mistralai/gcp/client/models/fimcompletionrequest.py +++ b/packages/gcp/src/mistralai/gcp/client/models/fimcompletionrequest.py @@ -84,47 +84,46 @@ class FIMCompletionRequest(BaseModel): @model_serializer(mode="wrap") def serialize_model(self, handler): - optional_fields = [ - "temperature", - "top_p", - "max_tokens", - "stream", - "stop", - "random_seed", - "metadata", - "suffix", - "min_tokens", - ] - nullable_fields = [ - "temperature", - "max_tokens", - "random_seed", - "metadata", - "suffix", - "min_tokens", - ] - null_default_fields = [] - + optional_fields = set( + [ + "temperature", + "top_p", + "max_tokens", + "stream", + "stop", + "random_seed", + "metadata", + "suffix", + "min_tokens", + ] + ) + nullable_fields = set( + [ + "temperature", + "max_tokens", + "random_seed", + "metadata", + "suffix", + "min_tokens", + ] + ) serialized = handler(self) - m = {} for n, f in type(self).model_fields.items(): k = f.alias or n val = serialized.get(k) - serialized.pop(k, None) - - optional_nullable = k in 
optional_fields and k in nullable_fields - is_set = ( - self.__pydantic_fields_set__.intersection({n}) - or k in null_default_fields - ) # pylint: disable=no-member - - if val is not None and val != UNSET_SENTINEL: - m[k] = val - elif val != UNSET_SENTINEL and ( - not k in optional_fields or (optional_nullable and is_set) - ): - m[k] = val + is_nullable_and_explicitly_set = ( + k in nullable_fields + and (self.__pydantic_fields_set__.intersection({n})) # pylint: disable=no-member + ) + + if val != UNSET_SENTINEL: + if ( + val is not None + or k not in optional_fields + or is_nullable_and_explicitly_set + ): + m[k] = val return m diff --git a/packages/gcp/src/mistralai/gcp/client/models/fimcompletionstreamrequest.py b/packages/gcp/src/mistralai/gcp/client/models/fimcompletionstreamrequest.py index 8e610261..fffc3054 100644 --- a/packages/gcp/src/mistralai/gcp/client/models/fimcompletionstreamrequest.py +++ b/packages/gcp/src/mistralai/gcp/client/models/fimcompletionstreamrequest.py @@ -82,47 +82,46 @@ class FIMCompletionStreamRequest(BaseModel): @model_serializer(mode="wrap") def serialize_model(self, handler): - optional_fields = [ - "temperature", - "top_p", - "max_tokens", - "stream", - "stop", - "random_seed", - "metadata", - "suffix", - "min_tokens", - ] - nullable_fields = [ - "temperature", - "max_tokens", - "random_seed", - "metadata", - "suffix", - "min_tokens", - ] - null_default_fields = [] - + optional_fields = set( + [ + "temperature", + "top_p", + "max_tokens", + "stream", + "stop", + "random_seed", + "metadata", + "suffix", + "min_tokens", + ] + ) + nullable_fields = set( + [ + "temperature", + "max_tokens", + "random_seed", + "metadata", + "suffix", + "min_tokens", + ] + ) serialized = handler(self) - m = {} for n, f in type(self).model_fields.items(): k = f.alias or n val = serialized.get(k) - serialized.pop(k, None) - - optional_nullable = k in optional_fields and k in nullable_fields - is_set = ( - self.__pydantic_fields_set__.intersection({n}) - 
or k in null_default_fields - ) # pylint: disable=no-member - - if val is not None and val != UNSET_SENTINEL: - m[k] = val - elif val != UNSET_SENTINEL and ( - not k in optional_fields or (optional_nullable and is_set) - ): - m[k] = val + is_nullable_and_explicitly_set = ( + k in nullable_fields + and (self.__pydantic_fields_set__.intersection({n})) # pylint: disable=no-member + ) + + if val != UNSET_SENTINEL: + if ( + val is not None + or k not in optional_fields + or is_nullable_and_explicitly_set + ): + m[k] = val return m diff --git a/packages/gcp/src/mistralai/gcp/client/models/function.py b/packages/gcp/src/mistralai/gcp/client/models/function.py index 28577eff..439e8313 100644 --- a/packages/gcp/src/mistralai/gcp/client/models/function.py +++ b/packages/gcp/src/mistralai/gcp/client/models/function.py @@ -1,7 +1,8 @@ """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" from __future__ import annotations -from mistralai.gcp.client.types import BaseModel +from mistralai.gcp.client.types import BaseModel, UNSET_SENTINEL +from pydantic import model_serializer from typing import Any, Dict, Optional from typing_extensions import NotRequired, TypedDict @@ -21,3 +22,19 @@ class Function(BaseModel): description: Optional[str] = None strict: Optional[bool] = None + + @model_serializer(mode="wrap") + def serialize_model(self, handler): + optional_fields = set(["description", "strict"]) + serialized = handler(self) + m = {} + + for n, f in type(self).model_fields.items(): + k = f.alias or n + val = serialized.get(k) + + if val != UNSET_SENTINEL: + if val is not None or k not in optional_fields: + m[k] = val + + return m diff --git a/packages/gcp/src/mistralai/gcp/client/models/imageurl.py b/packages/gcp/src/mistralai/gcp/client/models/imageurl.py index d4f298f1..903d0a1a 100644 --- a/packages/gcp/src/mistralai/gcp/client/models/imageurl.py +++ b/packages/gcp/src/mistralai/gcp/client/models/imageurl.py @@ -25,30 +25,25 @@ class ImageURL(BaseModel): 
@model_serializer(mode="wrap") def serialize_model(self, handler): - optional_fields = ["detail"] - nullable_fields = ["detail"] - null_default_fields = [] - + optional_fields = set(["detail"]) + nullable_fields = set(["detail"]) serialized = handler(self) - m = {} for n, f in type(self).model_fields.items(): k = f.alias or n val = serialized.get(k) - serialized.pop(k, None) - - optional_nullable = k in optional_fields and k in nullable_fields - is_set = ( - self.__pydantic_fields_set__.intersection({n}) - or k in null_default_fields - ) # pylint: disable=no-member - - if val is not None and val != UNSET_SENTINEL: - m[k] = val - elif val != UNSET_SENTINEL and ( - not k in optional_fields or (optional_nullable and is_set) - ): - m[k] = val + is_nullable_and_explicitly_set = ( + k in nullable_fields + and (self.__pydantic_fields_set__.intersection({n})) # pylint: disable=no-member + ) + + if val != UNSET_SENTINEL: + if ( + val is not None + or k not in optional_fields + or is_nullable_and_explicitly_set + ): + m[k] = val return m diff --git a/packages/gcp/src/mistralai/gcp/client/models/imageurlchunk.py b/packages/gcp/src/mistralai/gcp/client/models/imageurlchunk.py index fc5284c1..4bec0eec 100644 --- a/packages/gcp/src/mistralai/gcp/client/models/imageurlchunk.py +++ b/packages/gcp/src/mistralai/gcp/client/models/imageurlchunk.py @@ -30,7 +30,13 @@ class ImageURLChunk(BaseModel): image_url: ImageURLUnion - TYPE: Annotated[ + type: Annotated[ Annotated[Literal["image_url"], AfterValidator(validate_const("image_url"))], pydantic.Field(alias="type"), ] = "image_url" + + +try: + ImageURLChunk.model_rebuild() +except NameError: + pass diff --git a/packages/gcp/src/mistralai/gcp/client/models/jsonschema.py b/packages/gcp/src/mistralai/gcp/client/models/jsonschema.py index 443c429d..684ac09f 100644 --- a/packages/gcp/src/mistralai/gcp/client/models/jsonschema.py +++ b/packages/gcp/src/mistralai/gcp/client/models/jsonschema.py @@ -32,30 +32,31 @@ class 
JSONSchema(BaseModel): @model_serializer(mode="wrap") def serialize_model(self, handler): - optional_fields = ["description", "strict"] - nullable_fields = ["description"] - null_default_fields = [] - + optional_fields = set(["description", "strict"]) + nullable_fields = set(["description"]) serialized = handler(self) - m = {} for n, f in type(self).model_fields.items(): k = f.alias or n val = serialized.get(k) - serialized.pop(k, None) + is_nullable_and_explicitly_set = ( + k in nullable_fields + and (self.__pydantic_fields_set__.intersection({n})) # pylint: disable=no-member + ) + + if val != UNSET_SENTINEL: + if ( + val is not None + or k not in optional_fields + or is_nullable_and_explicitly_set + ): + m[k] = val - optional_nullable = k in optional_fields and k in nullable_fields - is_set = ( - self.__pydantic_fields_set__.intersection({n}) - or k in null_default_fields - ) # pylint: disable=no-member + return m - if val is not None and val != UNSET_SENTINEL: - m[k] = val - elif val != UNSET_SENTINEL and ( - not k in optional_fields or (optional_nullable and is_set) - ): - m[k] = val - return m +try: + JSONSchema.model_rebuild() +except NameError: + pass diff --git a/packages/gcp/src/mistralai/gcp/client/models/prediction.py b/packages/gcp/src/mistralai/gcp/client/models/prediction.py index f53579ed..2e325289 100644 --- a/packages/gcp/src/mistralai/gcp/client/models/prediction.py +++ b/packages/gcp/src/mistralai/gcp/client/models/prediction.py @@ -1,9 +1,10 @@ """Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" from __future__ import annotations -from mistralai.gcp.client.types import BaseModel +from mistralai.gcp.client.types import BaseModel, UNSET_SENTINEL from mistralai.gcp.client.utils import validate_const import pydantic +from pydantic import model_serializer from pydantic.functional_validators import AfterValidator from typing import Literal, Optional from typing_extensions import Annotated, NotRequired, TypedDict @@ -19,7 +20,7 @@ class PredictionTypedDict(TypedDict): class Prediction(BaseModel): r"""Enable users to specify an expected completion, optimizing response times by leveraging known or predictable content.""" - TYPE: Annotated[ + type: Annotated[ Annotated[ Optional[Literal["content"]], AfterValidator(validate_const("content")) ], @@ -27,3 +28,25 @@ class Prediction(BaseModel): ] = "content" content: Optional[str] = "" + + @model_serializer(mode="wrap") + def serialize_model(self, handler): + optional_fields = set(["type", "content"]) + serialized = handler(self) + m = {} + + for n, f in type(self).model_fields.items(): + k = f.alias or n + val = serialized.get(k) + + if val != UNSET_SENTINEL: + if val is not None or k not in optional_fields: + m[k] = val + + return m + + +try: + Prediction.model_rebuild() +except NameError: + pass diff --git a/packages/gcp/src/mistralai/gcp/client/models/referencechunk.py b/packages/gcp/src/mistralai/gcp/client/models/referencechunk.py index 274ea7f7..261c4755 100644 --- a/packages/gcp/src/mistralai/gcp/client/models/referencechunk.py +++ b/packages/gcp/src/mistralai/gcp/client/models/referencechunk.py @@ -1,9 +1,10 @@ """Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" from __future__ import annotations -from mistralai.gcp.client.types import BaseModel +from mistralai.gcp.client.types import BaseModel, UNSET_SENTINEL from mistralai.gcp.client.utils import validate_const import pydantic +from pydantic import model_serializer from pydantic.functional_validators import AfterValidator from typing import List, Literal, Optional from typing_extensions import Annotated, TypedDict @@ -17,9 +18,31 @@ class ReferenceChunkTypedDict(TypedDict): class ReferenceChunk(BaseModel): reference_ids: List[int] - TYPE: Annotated[ + type: Annotated[ Annotated[ Optional[Literal["reference"]], AfterValidator(validate_const("reference")) ], pydantic.Field(alias="type"), ] = "reference" + + @model_serializer(mode="wrap") + def serialize_model(self, handler): + optional_fields = set(["type"]) + serialized = handler(self) + m = {} + + for n, f in type(self).model_fields.items(): + k = f.alias or n + val = serialized.get(k) + + if val != UNSET_SENTINEL: + if val is not None or k not in optional_fields: + m[k] = val + + return m + + +try: + ReferenceChunk.model_rebuild() +except NameError: + pass diff --git a/packages/gcp/src/mistralai/gcp/client/models/responseformat.py b/packages/gcp/src/mistralai/gcp/client/models/responseformat.py index 34ae6b03..f3aa9930 100644 --- a/packages/gcp/src/mistralai/gcp/client/models/responseformat.py +++ b/packages/gcp/src/mistralai/gcp/client/models/responseformat.py @@ -31,30 +31,25 @@ class ResponseFormat(BaseModel): @model_serializer(mode="wrap") def serialize_model(self, handler): - optional_fields = ["type", "json_schema"] - nullable_fields = ["json_schema"] - null_default_fields = [] - + optional_fields = set(["type", "json_schema"]) + nullable_fields = set(["json_schema"]) serialized = handler(self) - m = {} for n, f in type(self).model_fields.items(): k = f.alias or n val = serialized.get(k) - serialized.pop(k, None) - - optional_nullable = k in optional_fields and k in nullable_fields - is_set = ( - 
self.__pydantic_fields_set__.intersection({n}) - or k in null_default_fields - ) # pylint: disable=no-member - - if val is not None and val != UNSET_SENTINEL: - m[k] = val - elif val != UNSET_SENTINEL and ( - not k in optional_fields or (optional_nullable and is_set) - ): - m[k] = val + is_nullable_and_explicitly_set = ( + k in nullable_fields + and (self.__pydantic_fields_set__.intersection({n})) # pylint: disable=no-member + ) + + if val != UNSET_SENTINEL: + if ( + val is not None + or k not in optional_fields + or is_nullable_and_explicitly_set + ): + m[k] = val return m diff --git a/packages/gcp/src/mistralai/gcp/client/models/systemmessage.py b/packages/gcp/src/mistralai/gcp/client/models/systemmessage.py index a7d695a7..b3795c4b 100644 --- a/packages/gcp/src/mistralai/gcp/client/models/systemmessage.py +++ b/packages/gcp/src/mistralai/gcp/client/models/systemmessage.py @@ -32,7 +32,13 @@ class SystemMessageTypedDict(TypedDict): class SystemMessage(BaseModel): content: SystemMessageContent - ROLE: Annotated[ + role: Annotated[ Annotated[Literal["system"], AfterValidator(validate_const("system"))], pydantic.Field(alias="role"), ] = "system" + + +try: + SystemMessage.model_rebuild() +except NameError: + pass diff --git a/packages/gcp/src/mistralai/gcp/client/models/systemmessagecontentchunks.py b/packages/gcp/src/mistralai/gcp/client/models/systemmessagecontentchunks.py index 225f38b7..8de71c90 100644 --- a/packages/gcp/src/mistralai/gcp/client/models/systemmessagecontentchunks.py +++ b/packages/gcp/src/mistralai/gcp/client/models/systemmessagecontentchunks.py @@ -15,5 +15,5 @@ SystemMessageContentChunks = Annotated[ - Union[TextChunk, ThinkChunk], Field(discriminator="TYPE") + Union[TextChunk, ThinkChunk], Field(discriminator="type") ] diff --git a/packages/gcp/src/mistralai/gcp/client/models/textchunk.py b/packages/gcp/src/mistralai/gcp/client/models/textchunk.py index 77576c9f..69032272 100644 --- a/packages/gcp/src/mistralai/gcp/client/models/textchunk.py 
+++ b/packages/gcp/src/mistralai/gcp/client/models/textchunk.py @@ -17,7 +17,13 @@ class TextChunkTypedDict(TypedDict): class TextChunk(BaseModel): text: str - TYPE: Annotated[ + type: Annotated[ Annotated[Literal["text"], AfterValidator(validate_const("text"))], pydantic.Field(alias="type"), ] = "text" + + +try: + TextChunk.model_rebuild() +except NameError: + pass diff --git a/packages/gcp/src/mistralai/gcp/client/models/thinkchunk.py b/packages/gcp/src/mistralai/gcp/client/models/thinkchunk.py index b65fffb2..33ec8394 100644 --- a/packages/gcp/src/mistralai/gcp/client/models/thinkchunk.py +++ b/packages/gcp/src/mistralai/gcp/client/models/thinkchunk.py @@ -3,9 +3,10 @@ from __future__ import annotations from .referencechunk import ReferenceChunk, ReferenceChunkTypedDict from .textchunk import TextChunk, TextChunkTypedDict -from mistralai.gcp.client.types import BaseModel +from mistralai.gcp.client.types import BaseModel, UNSET_SENTINEL from mistralai.gcp.client.utils import validate_const import pydantic +from pydantic import model_serializer from pydantic.functional_validators import AfterValidator from typing import List, Literal, Optional, Union from typing_extensions import Annotated, NotRequired, TypeAliasType, TypedDict @@ -29,10 +30,32 @@ class ThinkChunkTypedDict(TypedDict): class ThinkChunk(BaseModel): thinking: List[Thinking] - TYPE: Annotated[ + type: Annotated[ Annotated[Literal["thinking"], AfterValidator(validate_const("thinking"))], pydantic.Field(alias="type"), ] = "thinking" closed: Optional[bool] = None r"""Whether the thinking chunk is closed or not. 
Currently only used for prefixing.""" + + @model_serializer(mode="wrap") + def serialize_model(self, handler): + optional_fields = set(["closed"]) + serialized = handler(self) + m = {} + + for n, f in type(self).model_fields.items(): + k = f.alias or n + val = serialized.get(k) + + if val != UNSET_SENTINEL: + if val is not None or k not in optional_fields: + m[k] = val + + return m + + +try: + ThinkChunk.model_rebuild() +except NameError: + pass diff --git a/packages/gcp/src/mistralai/gcp/client/models/tool.py b/packages/gcp/src/mistralai/gcp/client/models/tool.py index d09c6854..670aa81f 100644 --- a/packages/gcp/src/mistralai/gcp/client/models/tool.py +++ b/packages/gcp/src/mistralai/gcp/client/models/tool.py @@ -3,7 +3,8 @@ from __future__ import annotations from .function import Function, FunctionTypedDict from .tooltypes import ToolTypes -from mistralai.gcp.client.types import BaseModel +from mistralai.gcp.client.types import BaseModel, UNSET_SENTINEL +from pydantic import model_serializer from typing import Optional from typing_extensions import NotRequired, TypedDict @@ -17,3 +18,19 @@ class Tool(BaseModel): function: Function type: Optional[ToolTypes] = None + + @model_serializer(mode="wrap") + def serialize_model(self, handler): + optional_fields = set(["type"]) + serialized = handler(self) + m = {} + + for n, f in type(self).model_fields.items(): + k = f.alias or n + val = serialized.get(k) + + if val != UNSET_SENTINEL: + if val is not None or k not in optional_fields: + m[k] = val + + return m diff --git a/packages/gcp/src/mistralai/gcp/client/models/toolcall.py b/packages/gcp/src/mistralai/gcp/client/models/toolcall.py index a1edf337..3ea8e283 100644 --- a/packages/gcp/src/mistralai/gcp/client/models/toolcall.py +++ b/packages/gcp/src/mistralai/gcp/client/models/toolcall.py @@ -3,7 +3,8 @@ from __future__ import annotations from .functioncall import FunctionCall, FunctionCallTypedDict from .tooltypes import ToolTypes -from mistralai.gcp.client.types 
import BaseModel +from mistralai.gcp.client.types import BaseModel, UNSET_SENTINEL +from pydantic import model_serializer from typing import Optional from typing_extensions import NotRequired, TypedDict @@ -23,3 +24,19 @@ class ToolCall(BaseModel): type: Optional[ToolTypes] = None index: Optional[int] = 0 + + @model_serializer(mode="wrap") + def serialize_model(self, handler): + optional_fields = set(["id", "type", "index"]) + serialized = handler(self) + m = {} + + for n, f in type(self).model_fields.items(): + k = f.alias or n + val = serialized.get(k) + + if val != UNSET_SENTINEL: + if val is not None or k not in optional_fields: + m[k] = val + + return m diff --git a/packages/gcp/src/mistralai/gcp/client/models/toolchoice.py b/packages/gcp/src/mistralai/gcp/client/models/toolchoice.py index de3828da..6e795fd7 100644 --- a/packages/gcp/src/mistralai/gcp/client/models/toolchoice.py +++ b/packages/gcp/src/mistralai/gcp/client/models/toolchoice.py @@ -3,7 +3,8 @@ from __future__ import annotations from .functionname import FunctionName, FunctionNameTypedDict from .tooltypes import ToolTypes -from mistralai.gcp.client.types import BaseModel +from mistralai.gcp.client.types import BaseModel, UNSET_SENTINEL +from pydantic import model_serializer from typing import Optional from typing_extensions import NotRequired, TypedDict @@ -23,3 +24,19 @@ class ToolChoice(BaseModel): r"""this restriction of `Function` is used to select a specific function to call""" type: Optional[ToolTypes] = None + + @model_serializer(mode="wrap") + def serialize_model(self, handler): + optional_fields = set(["type"]) + serialized = handler(self) + m = {} + + for n, f in type(self).model_fields.items(): + k = f.alias or n + val = serialized.get(k) + + if val != UNSET_SENTINEL: + if val is not None or k not in optional_fields: + m[k] = val + + return m diff --git a/packages/gcp/src/mistralai/gcp/client/models/toolmessage.py b/packages/gcp/src/mistralai/gcp/client/models/toolmessage.py index 
65b1d9d6..ce160391 100644 --- a/packages/gcp/src/mistralai/gcp/client/models/toolmessage.py +++ b/packages/gcp/src/mistralai/gcp/client/models/toolmessage.py @@ -35,7 +35,7 @@ class ToolMessageTypedDict(TypedDict): class ToolMessage(BaseModel): content: Nullable[ToolMessageContent] - ROLE: Annotated[ + role: Annotated[ Annotated[Literal["tool"], AfterValidator(validate_const("tool"))], pydantic.Field(alias="role"), ] = "tool" @@ -46,30 +46,31 @@ class ToolMessage(BaseModel): @model_serializer(mode="wrap") def serialize_model(self, handler): - optional_fields = ["tool_call_id", "name"] - nullable_fields = ["content", "tool_call_id", "name"] - null_default_fields = [] - + optional_fields = set(["tool_call_id", "name"]) + nullable_fields = set(["content", "tool_call_id", "name"]) serialized = handler(self) - m = {} for n, f in type(self).model_fields.items(): k = f.alias or n val = serialized.get(k) - serialized.pop(k, None) + is_nullable_and_explicitly_set = ( + k in nullable_fields + and (self.__pydantic_fields_set__.intersection({n})) # pylint: disable=no-member + ) + + if val != UNSET_SENTINEL: + if ( + val is not None + or k not in optional_fields + or is_nullable_and_explicitly_set + ): + m[k] = val - optional_nullable = k in optional_fields and k in nullable_fields - is_set = ( - self.__pydantic_fields_set__.intersection({n}) - or k in null_default_fields - ) # pylint: disable=no-member + return m - if val is not None and val != UNSET_SENTINEL: - m[k] = val - elif val != UNSET_SENTINEL and ( - not k in optional_fields or (optional_nullable and is_set) - ): - m[k] = val - return m +try: + ToolMessage.model_rebuild() +except NameError: + pass diff --git a/packages/gcp/src/mistralai/gcp/client/models/usageinfo.py b/packages/gcp/src/mistralai/gcp/client/models/usageinfo.py index 9b7207b1..cb6feb6e 100644 --- a/packages/gcp/src/mistralai/gcp/client/models/usageinfo.py +++ b/packages/gcp/src/mistralai/gcp/client/models/usageinfo.py @@ -45,37 +45,34 @@ def 
additional_properties(self, value): @model_serializer(mode="wrap") def serialize_model(self, handler): - optional_fields = [ - "prompt_tokens", - "completion_tokens", - "total_tokens", - "prompt_audio_seconds", - ] - nullable_fields = ["prompt_audio_seconds"] - null_default_fields = [] - + optional_fields = set( + [ + "prompt_tokens", + "completion_tokens", + "total_tokens", + "prompt_audio_seconds", + ] + ) + nullable_fields = set(["prompt_audio_seconds"]) serialized = handler(self) - m = {} for n, f in type(self).model_fields.items(): k = f.alias or n val = serialized.get(k) serialized.pop(k, None) - - optional_nullable = k in optional_fields and k in nullable_fields - is_set = ( - self.__pydantic_fields_set__.intersection({n}) - or k in null_default_fields - ) # pylint: disable=no-member - - if val is not None and val != UNSET_SENTINEL: - m[k] = val - elif val != UNSET_SENTINEL and ( - not k in optional_fields or (optional_nullable and is_set) - ): - m[k] = val - + is_nullable_and_explicitly_set = ( + k in nullable_fields + and (self.__pydantic_fields_set__.intersection({n})) # pylint: disable=no-member + ) + + if val != UNSET_SENTINEL: + if ( + val is not None + or k not in optional_fields + or is_nullable_and_explicitly_set + ): + m[k] = val for k, v in serialized.items(): m[k] = v diff --git a/packages/gcp/src/mistralai/gcp/client/models/usermessage.py b/packages/gcp/src/mistralai/gcp/client/models/usermessage.py index c083e16d..e237e900 100644 --- a/packages/gcp/src/mistralai/gcp/client/models/usermessage.py +++ b/packages/gcp/src/mistralai/gcp/client/models/usermessage.py @@ -27,37 +27,27 @@ class UserMessageTypedDict(TypedDict): class UserMessage(BaseModel): content: Nullable[UserMessageContent] - ROLE: Annotated[ + role: Annotated[ Annotated[Literal["user"], AfterValidator(validate_const("user"))], pydantic.Field(alias="role"), ] = "user" @model_serializer(mode="wrap") def serialize_model(self, handler): - optional_fields = [] - nullable_fields = 
["content"] - null_default_fields = [] - serialized = handler(self) - m = {} for n, f in type(self).model_fields.items(): k = f.alias or n val = serialized.get(k) - serialized.pop(k, None) - - optional_nullable = k in optional_fields and k in nullable_fields - is_set = ( - self.__pydantic_fields_set__.intersection({n}) - or k in null_default_fields - ) # pylint: disable=no-member - if val is not None and val != UNSET_SENTINEL: - m[k] = val - elif val != UNSET_SENTINEL and ( - not k in optional_fields or (optional_nullable and is_set) - ): + if val != UNSET_SENTINEL: m[k] = val return m + + +try: + UserMessage.model_rebuild() +except NameError: + pass diff --git a/packages/gcp/src/mistralai/gcp/client/utils/__init__.py b/packages/gcp/src/mistralai/gcp/client/utils/__init__.py index 05f26ade..b488c2df 100644 --- a/packages/gcp/src/mistralai/gcp/client/utils/__init__.py +++ b/packages/gcp/src/mistralai/gcp/client/utils/__init__.py @@ -1,14 +1,23 @@ """Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" -from typing import TYPE_CHECKING -from importlib import import_module -import builtins -import sys +from typing import Any, TYPE_CHECKING, Callable, TypeVar +import asyncio + +from .dynamic_imports import lazy_getattr, lazy_dir + +_T = TypeVar("_T") + + +async def run_sync_in_thread(func: Callable[..., _T], *args) -> _T: + """Run a synchronous function in a thread pool to avoid blocking the event loop.""" + return await asyncio.to_thread(func, *args) + if TYPE_CHECKING: from .annotations import get_discriminator from .datetimes import parse_datetime from .enums import OpenEnumMeta + from .unions import parse_open_union from .headers import get_headers, get_response_headers from .metadata import ( FieldMetadata, @@ -76,6 +85,7 @@ "match_response", "MultipartFormMetadata", "OpenEnumMeta", + "parse_open_union", "PathParamMetadata", "QueryParamMetadata", "remove_suffix", @@ -128,6 +138,7 @@ "match_response": ".values", "MultipartFormMetadata": ".metadata", "OpenEnumMeta": ".enums", + "parse_open_union": ".unions", "PathParamMetadata": ".metadata", "QueryParamMetadata": ".metadata", "remove_suffix": ".url", @@ -157,38 +168,11 @@ } -def dynamic_import(modname, retries=3): - for attempt in range(retries): - try: - return import_module(modname, __package__) - except KeyError: - # Clear any half-initialized module and retry - sys.modules.pop(modname, None) - if attempt == retries - 1: - break - raise KeyError(f"Failed to import module '{modname}' after {retries} attempts") - - -def __getattr__(attr_name: str) -> object: - module_name = _dynamic_imports.get(attr_name) - if module_name is None: - raise AttributeError( - f"no {attr_name} found in _dynamic_imports, module name -> {__name__} " - ) - - try: - module = dynamic_import(module_name) - return getattr(module, attr_name) - except ImportError as e: - raise ImportError( - f"Failed to import {attr_name} from {module_name}: {e}" - ) from e - except AttributeError as e: - raise AttributeError( - f"Failed to 
get {attr_name} from {module_name}: {e}" - ) from e +def __getattr__(attr_name: str) -> Any: + return lazy_getattr( + attr_name, package=__package__, dynamic_imports=_dynamic_imports + ) def __dir__(): - lazy_attrs = builtins.list(_dynamic_imports.keys()) - return builtins.sorted(lazy_attrs) + return lazy_dir(dynamic_imports=_dynamic_imports) diff --git a/packages/gcp/src/mistralai/gcp/client/utils/dynamic_imports.py b/packages/gcp/src/mistralai/gcp/client/utils/dynamic_imports.py new file mode 100644 index 00000000..673edf82 --- /dev/null +++ b/packages/gcp/src/mistralai/gcp/client/utils/dynamic_imports.py @@ -0,0 +1,54 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from importlib import import_module +import builtins +import sys + + +def dynamic_import(package, modname, retries=3): + """Import a module relative to package, retrying on KeyError from half-initialized modules.""" + for attempt in range(retries): + try: + return import_module(modname, package) + except KeyError: + sys.modules.pop(modname, None) + if attempt == retries - 1: + break + raise KeyError(f"Failed to import module '{modname}' after {retries} attempts") + + +def lazy_getattr(attr_name, *, package, dynamic_imports, sub_packages=None): + """Module-level __getattr__ that lazily loads from a dynamic_imports mapping. + + Args: + attr_name: The attribute being looked up. + package: The caller's __package__ (for relative imports). + dynamic_imports: Dict mapping attribute names to relative module paths. + sub_packages: Optional list of subpackage names to lazy-load. 
+ """ + module_name = dynamic_imports.get(attr_name) + if module_name is not None: + try: + module = dynamic_import(package, module_name) + return getattr(module, attr_name) + except ImportError as e: + raise ImportError( + f"Failed to import {attr_name} from {module_name}: {e}" + ) from e + except AttributeError as e: + raise AttributeError( + f"Failed to get {attr_name} from {module_name}: {e}" + ) from e + + if sub_packages and attr_name in sub_packages: + return import_module(f".{attr_name}", package) + + raise AttributeError(f"module '{package}' has no attribute '{attr_name}'") + + +def lazy_dir(*, dynamic_imports, sub_packages=None): + """Module-level __dir__ that lists lazily-loadable attributes.""" + lazy_attrs = builtins.list(dynamic_imports.keys()) + if sub_packages: + lazy_attrs.extend(sub_packages) + return builtins.sorted(lazy_attrs) diff --git a/packages/gcp/src/mistralai/gcp/client/utils/eventstreaming.py b/packages/gcp/src/mistralai/gcp/client/utils/eventstreaming.py index 0969899b..f2052fc2 100644 --- a/packages/gcp/src/mistralai/gcp/client/utils/eventstreaming.py +++ b/packages/gcp/src/mistralai/gcp/client/utils/eventstreaming.py @@ -2,7 +2,9 @@ import re import json +from dataclasses import dataclass, asdict from typing import ( + Any, Callable, Generic, TypeVar, @@ -22,6 +24,7 @@ class EventStream(Generic[T]): client_ref: Optional[object] response: httpx.Response generator: Generator[T, None, None] + _closed: bool def __init__( self, @@ -33,17 +36,21 @@ def __init__( self.response = response self.generator = stream_events(response, decoder, sentinel) self.client_ref = client_ref + self._closed = False def __iter__(self): return self def __next__(self): + if self._closed: + raise StopIteration return next(self.generator) def __enter__(self): return self def __exit__(self, exc_type, exc_val, exc_tb): + self._closed = True self.response.close() @@ -53,6 +60,7 @@ class EventStreamAsync(Generic[T]): client_ref: Optional[object] response: 
httpx.Response generator: AsyncGenerator[T, None] + _closed: bool def __init__( self, @@ -64,33 +72,45 @@ def __init__( self.response = response self.generator = stream_events_async(response, decoder, sentinel) self.client_ref = client_ref + self._closed = False def __aiter__(self): return self async def __anext__(self): + if self._closed: + raise StopAsyncIteration return await self.generator.__anext__() async def __aenter__(self): return self async def __aexit__(self, exc_type, exc_val, exc_tb): + self._closed = True await self.response.aclose() +@dataclass class ServerEvent: id: Optional[str] = None event: Optional[str] = None - data: Optional[str] = None + data: Any = None retry: Optional[int] = None MESSAGE_BOUNDARIES = [ b"\r\n\r\n", - b"\n\n", + b"\r\n\r", + b"\r\n\n", + b"\r\r\n", + b"\n\r\n", b"\r\r", + b"\n\r", + b"\n\n", ] +UTF8_BOM = b"\xef\xbb\xbf" + async def stream_events_async( response: httpx.Response, @@ -99,14 +119,10 @@ async def stream_events_async( ) -> AsyncGenerator[T, None]: buffer = bytearray() position = 0 - discard = False + event_id: Optional[str] = None async for chunk in response.aiter_bytes(): - # We've encountered the sentinel value and should no longer process - # incoming data. Instead we throw new data away until the server closes - # the connection. 
- if discard: - continue - + if len(buffer) == 0 and chunk.startswith(UTF8_BOM): + chunk = chunk[len(UTF8_BOM) :] buffer += chunk for i in range(position, len(buffer)): char = buffer[i : i + 1] @@ -121,15 +137,22 @@ async def stream_events_async( block = buffer[position:i] position = i + len(seq) - event, discard = _parse_event(block, decoder, sentinel) + event, discard, event_id = _parse_event( + raw=block, decoder=decoder, sentinel=sentinel, event_id=event_id + ) if event is not None: yield event + if discard: + await response.aclose() + return if position > 0: buffer = buffer[position:] position = 0 - event, discard = _parse_event(buffer, decoder, sentinel) + event, discard, _ = _parse_event( + raw=buffer, decoder=decoder, sentinel=sentinel, event_id=event_id + ) if event is not None: yield event @@ -141,14 +164,10 @@ def stream_events( ) -> Generator[T, None, None]: buffer = bytearray() position = 0 - discard = False + event_id: Optional[str] = None for chunk in response.iter_bytes(): - # We've encountered the sentinel value and should no longer process - # incoming data. Instead we throw new data away until the server closes - # the connection. 
- if discard: - continue - + if len(buffer) == 0 and chunk.startswith(UTF8_BOM): + chunk = chunk[len(UTF8_BOM) :] buffer += chunk for i in range(position, len(buffer)): char = buffer[i : i + 1] @@ -163,22 +182,33 @@ def stream_events( block = buffer[position:i] position = i + len(seq) - event, discard = _parse_event(block, decoder, sentinel) + event, discard, event_id = _parse_event( + raw=block, decoder=decoder, sentinel=sentinel, event_id=event_id + ) if event is not None: yield event + if discard: + response.close() + return if position > 0: buffer = buffer[position:] position = 0 - event, discard = _parse_event(buffer, decoder, sentinel) + event, discard, _ = _parse_event( + raw=buffer, decoder=decoder, sentinel=sentinel, event_id=event_id + ) if event is not None: yield event def _parse_event( - raw: bytearray, decoder: Callable[[str], T], sentinel: Optional[str] = None -) -> Tuple[Optional[T], bool]: + *, + raw: bytearray, + decoder: Callable[[str], T], + sentinel: Optional[str] = None, + event_id: Optional[str] = None, +) -> Tuple[Optional[T], bool, Optional[str]]: block = raw.decode() lines = re.split(r"\r?\n|\r", block) publish = False @@ -189,13 +219,16 @@ def _parse_event( continue delim = line.find(":") - if delim <= 0: + if delim == 0: continue - field = line[0:delim] - value = line[delim + 1 :] if delim < len(line) - 1 else "" - if len(value) and value[0] == " ": - value = value[1:] + field = line + value = "" + if delim > 0: + field = line[0:delim] + value = line[delim + 1 :] if delim < len(line) - 1 else "" + if len(value) and value[0] == " ": + value = value[1:] if field == "event": event.event = value @@ -204,37 +237,36 @@ def _parse_event( data += value + "\n" publish = True elif field == "id": - event.id = value publish = True + if "\x00" not in value: + event_id = value elif field == "retry": - event.retry = int(value) if value.isdigit() else None + if value.isdigit(): + event.retry = int(value) publish = True + event.id = event_id + if 
sentinel and data == f"{sentinel}\n": - return None, True + return None, True, event_id if data: data = data[:-1] - event.data = data - - data_is_primitive = ( - data.isnumeric() or data == "true" or data == "false" or data == "null" - ) - data_is_json = ( - data.startswith("{") or data.startswith("[") or data.startswith('"') - ) - - if data_is_primitive or data_is_json: - try: - event.data = json.loads(data) - except Exception: - pass + try: + event.data = json.loads(data) + except json.JSONDecodeError: + event.data = data out = None if publish: - out = decoder(json.dumps(event.__dict__)) - - return out, False + out_dict = { + k: v + for k, v in asdict(event).items() + if v is not None or (k == "data" and data) + } + out = decoder(json.dumps(out_dict)) + + return out, False, event_id def _peek_sequence(position: int, buffer: bytearray, sequence: bytes): diff --git a/packages/gcp/src/mistralai/gcp/client/utils/forms.py b/packages/gcp/src/mistralai/gcp/client/utils/forms.py index f961e76b..1e550bd5 100644 --- a/packages/gcp/src/mistralai/gcp/client/utils/forms.py +++ b/packages/gcp/src/mistralai/gcp/client/utils/forms.py @@ -142,7 +142,7 @@ def serialize_multipart_form( if field_metadata.file: if isinstance(val, List): # Handle array of files - array_field_name = f_name + "[]" + array_field_name = f_name for file_obj in val: if not _is_set(file_obj): continue @@ -185,7 +185,7 @@ def serialize_multipart_form( continue values.append(_val_to_string(value)) - array_field_name = f_name + "[]" + array_field_name = f_name form[array_field_name] = values else: form[f_name] = _val_to_string(val) diff --git a/packages/gcp/src/mistralai/gcp/client/utils/retries.py b/packages/gcp/src/mistralai/gcp/client/utils/retries.py index 88a91b10..af07d4e9 100644 --- a/packages/gcp/src/mistralai/gcp/client/utils/retries.py +++ b/packages/gcp/src/mistralai/gcp/client/utils/retries.py @@ -144,12 +144,7 @@ def do_request() -> httpx.Response: if res.status_code == parsed_code: raise 
TemporaryError(res) - except httpx.ConnectError as exception: - if retries.config.retry_connection_errors: - raise - - raise PermanentError(exception) from exception - except httpx.TimeoutException as exception: + except (httpx.NetworkError, httpx.TimeoutException) as exception: if retries.config.retry_connection_errors: raise @@ -193,12 +188,7 @@ async def do_request() -> httpx.Response: if res.status_code == parsed_code: raise TemporaryError(res) - except httpx.ConnectError as exception: - if retries.config.retry_connection_errors: - raise - - raise PermanentError(exception) from exception - except httpx.TimeoutException as exception: + except (httpx.NetworkError, httpx.TimeoutException) as exception: if retries.config.retry_connection_errors: raise diff --git a/packages/gcp/src/mistralai/gcp/client/utils/security.py b/packages/gcp/src/mistralai/gcp/client/utils/security.py index 295a3f40..17996bd5 100644 --- a/packages/gcp/src/mistralai/gcp/client/utils/security.py +++ b/packages/gcp/src/mistralai/gcp/client/utils/security.py @@ -135,6 +135,8 @@ def _parse_security_scheme_value( elif scheme_type == "http": if sub_type == "bearer": headers[header_name] = _apply_bearer(value) + elif sub_type == "basic": + headers[header_name] = value elif sub_type == "custom": return else: diff --git a/packages/gcp/src/mistralai/gcp/client/utils/unions.py b/packages/gcp/src/mistralai/gcp/client/utils/unions.py new file mode 100644 index 00000000..a227f4e8 --- /dev/null +++ b/packages/gcp/src/mistralai/gcp/client/utils/unions.py @@ -0,0 +1,32 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from typing import Any + +from pydantic import BaseModel, TypeAdapter + + +def parse_open_union( + v: Any, + *, + disc_key: str, + variants: dict[str, Any], + unknown_cls: type, + union_name: str, +) -> Any: + """Parse an open discriminated union value with forward-compatibility. + + Known discriminator values are dispatched to their variant types. 
+ Unknown discriminator values produce an instance of the fallback class, + preserving the raw payload for inspection. + """ + if isinstance(v, BaseModel): + return v + if not isinstance(v, dict) or disc_key not in v: + raise ValueError(f"{union_name}: expected object with '{disc_key}' field") + disc = v[disc_key] + variant_cls = variants.get(disc) + if variant_cls is not None: + if isinstance(variant_cls, type) and issubclass(variant_cls, BaseModel): + return variant_cls.model_validate(v) + return TypeAdapter(variant_cls).validate_python(v) + return unknown_cls(raw=v) diff --git a/packages/gcp/src/mistralai/gcp/client/utils/unmarshal_json_response.py b/packages/gcp/src/mistralai/gcp/client/utils/unmarshal_json_response.py index 83e8275e..ead3e5a0 100644 --- a/packages/gcp/src/mistralai/gcp/client/utils/unmarshal_json_response.py +++ b/packages/gcp/src/mistralai/gcp/client/utils/unmarshal_json_response.py @@ -5,7 +5,7 @@ import httpx from .serializers import unmarshal_json -from mistralai.gcp.client import models +from mistralai.gcp.client import errors T = TypeVar("T") @@ -30,7 +30,7 @@ def unmarshal_json_response( try: return unmarshal_json(body, typ) except Exception as e: - raise models.ResponseValidationError( + raise errors.ResponseValidationError( "Response validation failed", http_res, e, diff --git a/pylintrc b/pylintrc index 2dc62b0e..d1653ae1 100644 --- a/pylintrc +++ b/pylintrc @@ -641,7 +641,7 @@ additional-builtins= allow-global-unused-variables=yes # List of names allowed to shadow builtins -allowed-redefined-builtins=id,object +allowed-redefined-builtins=id,object,input,dir # List of strings which can identify a callback function by name. A callback # name must start or end with one of those strings. 
diff --git a/src/mistralai/client/__init__.py b/src/mistralai/client/__init__.py index 481fc916..4b79610a 100644 --- a/src/mistralai/client/__init__.py +++ b/src/mistralai/client/__init__.py @@ -10,7 +10,6 @@ ) from .sdk import * from .sdkconfiguration import * -from .models import * VERSION: str = __version__ diff --git a/src/mistralai/client/_version.py b/src/mistralai/client/_version.py index 814d9ec7..1a4d15d6 100644 --- a/src/mistralai/client/_version.py +++ b/src/mistralai/client/_version.py @@ -4,10 +4,10 @@ import importlib.metadata __title__: str = "mistralai" -__version__: str = "2.0.0a3" +__version__: str = "2.0.0-a3.1" __openapi_doc_version__: str = "1.0.0" -__gen_version__: str = "2.794.1" -__user_agent__: str = "speakeasy-sdk/python 2.0.0a3 2.794.1 1.0.0 mistralai" +__gen_version__: str = "2.841.0" +__user_agent__: str = "speakeasy-sdk/python 2.0.0-a3.1 2.841.0 1.0.0 mistralai" try: if __package__ is not None: diff --git a/src/mistralai/client/accesses.py b/src/mistralai/client/accesses.py index cda484c8..0761b0bc 100644 --- a/src/mistralai/client/accesses.py +++ b/src/mistralai/client/accesses.py @@ -2,12 +2,8 @@ # @generated-id: 76fc53bfcf59 from .basesdk import BaseSDK -from mistralai.client import models, utils +from mistralai.client import errors, models, utils from mistralai.client._hooks import HookContext -from mistralai.client.models import ( - entitytype as models_entitytype, - shareenum as models_shareenum, -) from mistralai.client.types import OptionalNullable, UNSET from mistralai.client.utils import get_security_from_env from mistralai.client.utils.unmarshal_json_response import unmarshal_json_response @@ -46,7 +42,7 @@ def list( else: base_url = self._get_url(base_url, url_variables) - request = models.ListLibraryAccessesRequest( + request = models.LibrariesShareListV1Request( library_id=library_id, ) @@ -79,7 +75,7 @@ def list( hook_ctx=HookContext( config=self.sdk_configuration, base_url=base_url or "", - 
operation_id="ListLibraryAccesses", + operation_id="libraries_share_list_v1", oauth2_scopes=None, security_source=get_security_from_env( self.sdk_configuration.security, models.Security @@ -95,17 +91,17 @@ def list( return unmarshal_json_response(models.ListSharingOut, http_res) if utils.match_response(http_res, "422", "application/json"): response_data = unmarshal_json_response( - models.HTTPValidationErrorData, http_res + errors.HTTPValidationErrorData, http_res ) - raise models.HTTPValidationError(response_data, http_res) + raise errors.HTTPValidationError(response_data, http_res) if utils.match_response(http_res, "4XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise models.SDKError("Unexpected response received", http_res) + raise errors.SDKError("Unexpected response received", http_res) async def list_async( self, @@ -136,7 +132,7 @@ async def list_async( else: base_url = self._get_url(base_url, url_variables) - request = models.ListLibraryAccessesRequest( + request = models.LibrariesShareListV1Request( library_id=library_id, ) @@ -169,7 +165,7 @@ async def list_async( hook_ctx=HookContext( config=self.sdk_configuration, base_url=base_url or "", - operation_id="ListLibraryAccesses", + operation_id="libraries_share_list_v1", oauth2_scopes=None, security_source=get_security_from_env( self.sdk_configuration.security, models.Security @@ -185,25 +181,25 @@ async def list_async( return unmarshal_json_response(models.ListSharingOut, http_res) if utils.match_response(http_res, "422", "application/json"): response_data = unmarshal_json_response( - models.HTTPValidationErrorData, 
http_res + errors.HTTPValidationErrorData, http_res ) - raise models.HTTPValidationError(response_data, http_res) + raise errors.HTTPValidationError(response_data, http_res) if utils.match_response(http_res, "4XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise models.SDKError("Unexpected response received", http_res) + raise errors.SDKError("Unexpected response received", http_res) def update_or_create( self, *, library_id: str, - level: models_shareenum.ShareEnum, + level: models.ShareEnum, share_with_uuid: str, - share_with_type: models_entitytype.EntityType, + share_with_type: models.EntityType, org_id: OptionalNullable[str] = UNSET, retries: OptionalNullable[utils.RetryConfig] = UNSET, server_url: Optional[str] = None, @@ -234,7 +230,7 @@ def update_or_create( else: base_url = self._get_url(base_url, url_variables) - request = models.UpdateOrCreateLibraryAccessRequest( + request = models.LibrariesShareCreateV1Request( library_id=library_id, sharing_in=models.SharingIn( org_id=org_id, @@ -276,7 +272,7 @@ def update_or_create( hook_ctx=HookContext( config=self.sdk_configuration, base_url=base_url or "", - operation_id="UpdateOrCreateLibraryAccess", + operation_id="libraries_share_create_v1", oauth2_scopes=None, security_source=get_security_from_env( self.sdk_configuration.security, models.Security @@ -292,25 +288,25 @@ def update_or_create( return unmarshal_json_response(models.SharingOut, http_res) if utils.match_response(http_res, "422", "application/json"): response_data = unmarshal_json_response( - models.HTTPValidationErrorData, http_res + 
errors.HTTPValidationErrorData, http_res ) - raise models.HTTPValidationError(response_data, http_res) + raise errors.HTTPValidationError(response_data, http_res) if utils.match_response(http_res, "4XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise models.SDKError("Unexpected response received", http_res) + raise errors.SDKError("Unexpected response received", http_res) async def update_or_create_async( self, *, library_id: str, - level: models_shareenum.ShareEnum, + level: models.ShareEnum, share_with_uuid: str, - share_with_type: models_entitytype.EntityType, + share_with_type: models.EntityType, org_id: OptionalNullable[str] = UNSET, retries: OptionalNullable[utils.RetryConfig] = UNSET, server_url: Optional[str] = None, @@ -341,7 +337,7 @@ async def update_or_create_async( else: base_url = self._get_url(base_url, url_variables) - request = models.UpdateOrCreateLibraryAccessRequest( + request = models.LibrariesShareCreateV1Request( library_id=library_id, sharing_in=models.SharingIn( org_id=org_id, @@ -383,7 +379,7 @@ async def update_or_create_async( hook_ctx=HookContext( config=self.sdk_configuration, base_url=base_url or "", - operation_id="UpdateOrCreateLibraryAccess", + operation_id="libraries_share_create_v1", oauth2_scopes=None, security_source=get_security_from_env( self.sdk_configuration.security, models.Security @@ -399,24 +395,24 @@ async def update_or_create_async( return unmarshal_json_response(models.SharingOut, http_res) if utils.match_response(http_res, "422", "application/json"): response_data = unmarshal_json_response( - models.HTTPValidationErrorData, 
http_res + errors.HTTPValidationErrorData, http_res ) - raise models.HTTPValidationError(response_data, http_res) + raise errors.HTTPValidationError(response_data, http_res) if utils.match_response(http_res, "4XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise models.SDKError("Unexpected response received", http_res) + raise errors.SDKError("Unexpected response received", http_res) def delete( self, *, library_id: str, share_with_uuid: str, - share_with_type: models_entitytype.EntityType, + share_with_type: models.EntityType, org_id: OptionalNullable[str] = UNSET, retries: OptionalNullable[utils.RetryConfig] = UNSET, server_url: Optional[str] = None, @@ -446,7 +442,7 @@ def delete( else: base_url = self._get_url(base_url, url_variables) - request = models.DeleteLibraryAccessRequest( + request = models.LibrariesShareDeleteV1Request( library_id=library_id, sharing_delete=models.SharingDelete( org_id=org_id, @@ -487,7 +483,7 @@ def delete( hook_ctx=HookContext( config=self.sdk_configuration, base_url=base_url or "", - operation_id="DeleteLibraryAccess", + operation_id="libraries_share_delete_v1", oauth2_scopes=None, security_source=get_security_from_env( self.sdk_configuration.security, models.Security @@ -503,24 +499,24 @@ def delete( return unmarshal_json_response(models.SharingOut, http_res) if utils.match_response(http_res, "422", "application/json"): response_data = unmarshal_json_response( - models.HTTPValidationErrorData, http_res + errors.HTTPValidationErrorData, http_res ) - raise models.HTTPValidationError(response_data, http_res) + raise 
errors.HTTPValidationError(response_data, http_res) if utils.match_response(http_res, "4XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise models.SDKError("Unexpected response received", http_res) + raise errors.SDKError("Unexpected response received", http_res) async def delete_async( self, *, library_id: str, share_with_uuid: str, - share_with_type: models_entitytype.EntityType, + share_with_type: models.EntityType, org_id: OptionalNullable[str] = UNSET, retries: OptionalNullable[utils.RetryConfig] = UNSET, server_url: Optional[str] = None, @@ -550,7 +546,7 @@ async def delete_async( else: base_url = self._get_url(base_url, url_variables) - request = models.DeleteLibraryAccessRequest( + request = models.LibrariesShareDeleteV1Request( library_id=library_id, sharing_delete=models.SharingDelete( org_id=org_id, @@ -591,7 +587,7 @@ async def delete_async( hook_ctx=HookContext( config=self.sdk_configuration, base_url=base_url or "", - operation_id="DeleteLibraryAccess", + operation_id="libraries_share_delete_v1", oauth2_scopes=None, security_source=get_security_from_env( self.sdk_configuration.security, models.Security @@ -607,14 +603,14 @@ async def delete_async( return unmarshal_json_response(models.SharingOut, http_res) if utils.match_response(http_res, "422", "application/json"): response_data = unmarshal_json_response( - models.HTTPValidationErrorData, http_res + errors.HTTPValidationErrorData, http_res ) - raise models.HTTPValidationError(response_data, http_res) + raise errors.HTTPValidationError(response_data, http_res) if utils.match_response(http_res, "4XX", "*"): 
http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise models.SDKError("Unexpected response received", http_res) + raise errors.SDKError("Unexpected response received", http_res) diff --git a/src/mistralai/client/agents.py b/src/mistralai/client/agents.py index 0942cb20..2b70d152 100644 --- a/src/mistralai/client/agents.py +++ b/src/mistralai/client/agents.py @@ -2,16 +2,8 @@ # @generated-id: e946546e3eaa from .basesdk import BaseSDK -from mistralai.client import models, utils +from mistralai.client import errors, models, utils from mistralai.client._hooks import HookContext -from mistralai.client.models import ( - agentscompletionrequest as models_agentscompletionrequest, - agentscompletionstreamrequest as models_agentscompletionstreamrequest, - mistralpromptmode as models_mistralpromptmode, - prediction as models_prediction, - responseformat as models_responseformat, - tool as models_tool, -) from mistralai.client.types import OptionalNullable, UNSET from mistralai.client.utils import eventstreaming, get_security_from_env from mistralai.client.utils.unmarshal_json_response import unmarshal_json_response @@ -25,47 +17,40 @@ def complete( self, *, messages: Union[ - List[models_agentscompletionrequest.AgentsCompletionRequestMessage], - List[ - models_agentscompletionrequest.AgentsCompletionRequestMessageTypedDict - ], + List[models.AgentsCompletionRequestMessage], + List[models.AgentsCompletionRequestMessageTypedDict], ], agent_id: str, max_tokens: OptionalNullable[int] = UNSET, stream: Optional[bool] = False, stop: Optional[ Union[ - 
models_agentscompletionrequest.AgentsCompletionRequestStop, - models_agentscompletionrequest.AgentsCompletionRequestStopTypedDict, + models.AgentsCompletionRequestStop, + models.AgentsCompletionRequestStopTypedDict, ] ] = None, random_seed: OptionalNullable[int] = UNSET, metadata: OptionalNullable[Dict[str, Any]] = UNSET, response_format: Optional[ - Union[ - models_responseformat.ResponseFormat, - models_responseformat.ResponseFormatTypedDict, - ] + Union[models.ResponseFormat, models.ResponseFormatTypedDict] ] = None, tools: OptionalNullable[ - Union[List[models_tool.Tool], List[models_tool.ToolTypedDict]] + Union[List[models.Tool], List[models.ToolTypedDict]] ] = UNSET, tool_choice: Optional[ Union[ - models_agentscompletionrequest.AgentsCompletionRequestToolChoice, - models_agentscompletionrequest.AgentsCompletionRequestToolChoiceTypedDict, + models.AgentsCompletionRequestToolChoice, + models.AgentsCompletionRequestToolChoiceTypedDict, ] ] = None, presence_penalty: Optional[float] = None, frequency_penalty: Optional[float] = None, n: OptionalNullable[int] = UNSET, prediction: Optional[ - Union[models_prediction.Prediction, models_prediction.PredictionTypedDict] + Union[models.Prediction, models.PredictionTypedDict] ] = None, parallel_tool_calls: Optional[bool] = None, - prompt_mode: OptionalNullable[ - models_mistralpromptmode.MistralPromptMode - ] = UNSET, + prompt_mode: OptionalNullable[models.MistralPromptMode] = UNSET, retries: OptionalNullable[utils.RetryConfig] = UNSET, server_url: Optional[str] = None, timeout_ms: Optional[int] = None, @@ -179,63 +164,56 @@ def complete( return unmarshal_json_response(models.ChatCompletionResponse, http_res) if utils.match_response(http_res, "422", "application/json"): response_data = unmarshal_json_response( - models.HTTPValidationErrorData, http_res + errors.HTTPValidationErrorData, http_res ) - raise models.HTTPValidationError(response_data, http_res) + raise errors.HTTPValidationError(response_data, http_res) if 
utils.match_response(http_res, "4XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise models.SDKError("Unexpected response received", http_res) + raise errors.SDKError("Unexpected response received", http_res) async def complete_async( self, *, messages: Union[ - List[models_agentscompletionrequest.AgentsCompletionRequestMessage], - List[ - models_agentscompletionrequest.AgentsCompletionRequestMessageTypedDict - ], + List[models.AgentsCompletionRequestMessage], + List[models.AgentsCompletionRequestMessageTypedDict], ], agent_id: str, max_tokens: OptionalNullable[int] = UNSET, stream: Optional[bool] = False, stop: Optional[ Union[ - models_agentscompletionrequest.AgentsCompletionRequestStop, - models_agentscompletionrequest.AgentsCompletionRequestStopTypedDict, + models.AgentsCompletionRequestStop, + models.AgentsCompletionRequestStopTypedDict, ] ] = None, random_seed: OptionalNullable[int] = UNSET, metadata: OptionalNullable[Dict[str, Any]] = UNSET, response_format: Optional[ - Union[ - models_responseformat.ResponseFormat, - models_responseformat.ResponseFormatTypedDict, - ] + Union[models.ResponseFormat, models.ResponseFormatTypedDict] ] = None, tools: OptionalNullable[ - Union[List[models_tool.Tool], List[models_tool.ToolTypedDict]] + Union[List[models.Tool], List[models.ToolTypedDict]] ] = UNSET, tool_choice: Optional[ Union[ - models_agentscompletionrequest.AgentsCompletionRequestToolChoice, - models_agentscompletionrequest.AgentsCompletionRequestToolChoiceTypedDict, + models.AgentsCompletionRequestToolChoice, + models.AgentsCompletionRequestToolChoiceTypedDict, ] ] = None, 
presence_penalty: Optional[float] = None, frequency_penalty: Optional[float] = None, n: OptionalNullable[int] = UNSET, prediction: Optional[ - Union[models_prediction.Prediction, models_prediction.PredictionTypedDict] + Union[models.Prediction, models.PredictionTypedDict] ] = None, parallel_tool_calls: Optional[bool] = None, - prompt_mode: OptionalNullable[ - models_mistralpromptmode.MistralPromptMode - ] = UNSET, + prompt_mode: OptionalNullable[models.MistralPromptMode] = UNSET, retries: OptionalNullable[utils.RetryConfig] = UNSET, server_url: Optional[str] = None, timeout_ms: Optional[int] = None, @@ -349,65 +327,56 @@ async def complete_async( return unmarshal_json_response(models.ChatCompletionResponse, http_res) if utils.match_response(http_res, "422", "application/json"): response_data = unmarshal_json_response( - models.HTTPValidationErrorData, http_res + errors.HTTPValidationErrorData, http_res ) - raise models.HTTPValidationError(response_data, http_res) + raise errors.HTTPValidationError(response_data, http_res) if utils.match_response(http_res, "4XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise models.SDKError("Unexpected response received", http_res) + raise errors.SDKError("Unexpected response received", http_res) def stream( self, *, messages: Union[ - List[ - models_agentscompletionstreamrequest.AgentsCompletionStreamRequestMessage - ], - List[ - models_agentscompletionstreamrequest.AgentsCompletionStreamRequestMessageTypedDict - ], + List[models.AgentsCompletionStreamRequestMessage], + 
List[models.AgentsCompletionStreamRequestMessageTypedDict], ], agent_id: str, max_tokens: OptionalNullable[int] = UNSET, stream: Optional[bool] = True, stop: Optional[ Union[ - models_agentscompletionstreamrequest.AgentsCompletionStreamRequestStop, - models_agentscompletionstreamrequest.AgentsCompletionStreamRequestStopTypedDict, + models.AgentsCompletionStreamRequestStop, + models.AgentsCompletionStreamRequestStopTypedDict, ] ] = None, random_seed: OptionalNullable[int] = UNSET, metadata: OptionalNullable[Dict[str, Any]] = UNSET, response_format: Optional[ - Union[ - models_responseformat.ResponseFormat, - models_responseformat.ResponseFormatTypedDict, - ] + Union[models.ResponseFormat, models.ResponseFormatTypedDict] ] = None, tools: OptionalNullable[ - Union[List[models_tool.Tool], List[models_tool.ToolTypedDict]] + Union[List[models.Tool], List[models.ToolTypedDict]] ] = UNSET, tool_choice: Optional[ Union[ - models_agentscompletionstreamrequest.AgentsCompletionStreamRequestToolChoice, - models_agentscompletionstreamrequest.AgentsCompletionStreamRequestToolChoiceTypedDict, + models.AgentsCompletionStreamRequestToolChoice, + models.AgentsCompletionStreamRequestToolChoiceTypedDict, ] ] = None, presence_penalty: Optional[float] = None, frequency_penalty: Optional[float] = None, n: OptionalNullable[int] = UNSET, prediction: Optional[ - Union[models_prediction.Prediction, models_prediction.PredictionTypedDict] + Union[models.Prediction, models.PredictionTypedDict] ] = None, parallel_tool_calls: Optional[bool] = None, - prompt_mode: OptionalNullable[ - models_mistralpromptmode.MistralPromptMode - ] = UNSET, + prompt_mode: OptionalNullable[models.MistralPromptMode] = UNSET, retries: OptionalNullable[utils.RetryConfig] = UNSET, server_url: Optional[str] = None, timeout_ms: Optional[int] = None, @@ -530,66 +499,57 @@ def stream( if utils.match_response(http_res, "422", "application/json"): http_res_text = utils.stream_to_text(http_res) response_data = 
unmarshal_json_response( - models.HTTPValidationErrorData, http_res, http_res_text + errors.HTTPValidationErrorData, http_res, http_res_text ) - raise models.HTTPValidationError(response_data, http_res, http_res_text) + raise errors.HTTPValidationError(response_data, http_res, http_res_text) if utils.match_response(http_res, "4XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("Unexpected response received", http_res, http_res_text) + raise errors.SDKError("Unexpected response received", http_res, http_res_text) async def stream_async( self, *, messages: Union[ - List[ - models_agentscompletionstreamrequest.AgentsCompletionStreamRequestMessage - ], - List[ - models_agentscompletionstreamrequest.AgentsCompletionStreamRequestMessageTypedDict - ], + List[models.AgentsCompletionStreamRequestMessage], + List[models.AgentsCompletionStreamRequestMessageTypedDict], ], agent_id: str, max_tokens: OptionalNullable[int] = UNSET, stream: Optional[bool] = True, stop: Optional[ Union[ - models_agentscompletionstreamrequest.AgentsCompletionStreamRequestStop, - models_agentscompletionstreamrequest.AgentsCompletionStreamRequestStopTypedDict, + models.AgentsCompletionStreamRequestStop, + models.AgentsCompletionStreamRequestStopTypedDict, ] ] = None, random_seed: OptionalNullable[int] = UNSET, metadata: OptionalNullable[Dict[str, Any]] = UNSET, response_format: Optional[ - Union[ - models_responseformat.ResponseFormat, - models_responseformat.ResponseFormatTypedDict, - ] + Union[models.ResponseFormat, models.ResponseFormatTypedDict] ] = 
None, tools: OptionalNullable[ - Union[List[models_tool.Tool], List[models_tool.ToolTypedDict]] + Union[List[models.Tool], List[models.ToolTypedDict]] ] = UNSET, tool_choice: Optional[ Union[ - models_agentscompletionstreamrequest.AgentsCompletionStreamRequestToolChoice, - models_agentscompletionstreamrequest.AgentsCompletionStreamRequestToolChoiceTypedDict, + models.AgentsCompletionStreamRequestToolChoice, + models.AgentsCompletionStreamRequestToolChoiceTypedDict, ] ] = None, presence_penalty: Optional[float] = None, frequency_penalty: Optional[float] = None, n: OptionalNullable[int] = UNSET, prediction: Optional[ - Union[models_prediction.Prediction, models_prediction.PredictionTypedDict] + Union[models.Prediction, models.PredictionTypedDict] ] = None, parallel_tool_calls: Optional[bool] = None, - prompt_mode: OptionalNullable[ - models_mistralpromptmode.MistralPromptMode - ] = UNSET, + prompt_mode: OptionalNullable[models.MistralPromptMode] = UNSET, retries: OptionalNullable[utils.RetryConfig] = UNSET, server_url: Optional[str] = None, timeout_ms: Optional[int] = None, @@ -712,15 +672,15 @@ async def stream_async( if utils.match_response(http_res, "422", "application/json"): http_res_text = await utils.stream_to_text_async(http_res) response_data = unmarshal_json_response( - models.HTTPValidationErrorData, http_res, http_res_text + errors.HTTPValidationErrorData, http_res, http_res_text ) - raise models.HTTPValidationError(response_data, http_res, http_res_text) + raise errors.HTTPValidationError(response_data, http_res, http_res_text) if utils.match_response(http_res, "4XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise 
errors.SDKError("API error occurred", http_res, http_res_text) http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("Unexpected response received", http_res, http_res_text) + raise errors.SDKError("Unexpected response received", http_res, http_res_text) diff --git a/src/mistralai/client/basesdk.py b/src/mistralai/client/basesdk.py index 611b4059..a976121b 100644 --- a/src/mistralai/client/basesdk.py +++ b/src/mistralai/client/basesdk.py @@ -3,13 +3,18 @@ from .sdkconfiguration import SDKConfiguration import httpx -from mistralai.client import models, utils +from mistralai.client import errors, models, utils from mistralai.client._hooks import ( AfterErrorContext, AfterSuccessContext, BeforeRequestContext, ) -from mistralai.client.utils import RetryConfig, SerializedRequestBody, get_body_content +from mistralai.client.utils import ( + RetryConfig, + SerializedRequestBody, + get_body_content, + run_sync_in_thread, +) from typing import Callable, List, Mapping, Optional, Tuple from urllib.parse import parse_qs, urlparse @@ -261,7 +266,7 @@ def do(): if http_res is None: logger.debug("Raising no response SDK error") - raise models.NoResponseError("No response received") + raise errors.NoResponseError("No response received") logger.debug( "Response:\nStatus Code: %s\nURL: %s\nHeaders: %s\nBody: %s", @@ -282,7 +287,7 @@ def do(): http_res = result else: logger.debug("Raising unexpected SDK error") - raise models.SDKError("Unexpected error occurred", http_res) + raise errors.SDKError("Unexpected error occurred", http_res) return http_res @@ -312,7 +317,10 @@ async def do_request_async( async def do(): http_res = None try: - req = hooks.before_request(BeforeRequestContext(hook_ctx), request) + req = await run_sync_in_thread( + hooks.before_request, BeforeRequestContext(hook_ctx), request + ) + logger.debug( "Request:\nMethod: %s\nURL: %s\nHeaders: %s\nBody: %s", req.method, @@ -326,14 +334,17 @@ async def do(): http_res = await 
client.send(req, stream=stream) except Exception as e: - _, e = hooks.after_error(AfterErrorContext(hook_ctx), None, e) + _, e = await run_sync_in_thread( + hooks.after_error, AfterErrorContext(hook_ctx), None, e + ) + if e is not None: logger.debug("Request Exception", exc_info=True) raise e if http_res is None: logger.debug("Raising no response SDK error") - raise models.NoResponseError("No response received") + raise errors.NoResponseError("No response received") logger.debug( "Response:\nStatus Code: %s\nURL: %s\nHeaders: %s\nBody: %s", @@ -344,9 +355,10 @@ async def do(): ) if utils.match_status_codes(error_status_codes, http_res.status_code): - result, err = hooks.after_error( - AfterErrorContext(hook_ctx), http_res, None + result, err = await run_sync_in_thread( + hooks.after_error, AfterErrorContext(hook_ctx), http_res, None ) + if err is not None: logger.debug("Request Exception", exc_info=True) raise err @@ -354,7 +366,7 @@ async def do(): http_res = result else: logger.debug("Raising unexpected SDK error") - raise models.SDKError("Unexpected error occurred", http_res) + raise errors.SDKError("Unexpected error occurred", http_res) return http_res @@ -366,6 +378,8 @@ async def do(): http_res = await do() if not utils.match_status_codes(error_status_codes, http_res.status_code): - http_res = hooks.after_success(AfterSuccessContext(hook_ctx), http_res) + http_res = await run_sync_in_thread( + hooks.after_success, AfterSuccessContext(hook_ctx), http_res + ) return http_res diff --git a/src/mistralai/client/batch_jobs.py b/src/mistralai/client/batch_jobs.py index 752c7652..0e135b30 100644 --- a/src/mistralai/client/batch_jobs.py +++ b/src/mistralai/client/batch_jobs.py @@ -3,14 +3,8 @@ from .basesdk import BaseSDK from datetime import datetime -from mistralai.client import models, utils +from mistralai.client import errors, models, utils from mistralai.client._hooks import HookContext -from mistralai.client.models import ( - apiendpoint as models_apiendpoint, 
- batchjobstatus as models_batchjobstatus, - batchrequest as models_batchrequest, - listbatchjobsop as models_listbatchjobsop, -) from mistralai.client.types import OptionalNullable, UNSET from mistralai.client.utils import get_security_from_env from mistralai.client.utils.unmarshal_json_response import unmarshal_json_response @@ -28,13 +22,13 @@ def list( metadata: OptionalNullable[Dict[str, Any]] = UNSET, created_after: OptionalNullable[datetime] = UNSET, created_by_me: Optional[bool] = False, - status: OptionalNullable[List[models_batchjobstatus.BatchJobStatus]] = UNSET, - order_by: Optional[models_listbatchjobsop.OrderBy] = "-created", + status: OptionalNullable[List[models.BatchJobStatus]] = UNSET, + order_by: Optional[models.OrderBy] = "-created", retries: OptionalNullable[utils.RetryConfig] = UNSET, server_url: Optional[str] = None, timeout_ms: Optional[int] = None, http_headers: Optional[Mapping[str, str]] = None, - ) -> models.BatchJobsOut: + ) -> models.ListBatchJobsResponse: r"""Get Batch Jobs Get a list of batch jobs for your organization and user. 
@@ -63,7 +57,7 @@ def list( else: base_url = self._get_url(base_url, url_variables) - request = models.ListBatchJobsRequest( + request = models.JobsAPIRoutesBatchGetBatchJobsRequest( page=page, page_size=page_size, model=model, @@ -104,7 +98,7 @@ def list( hook_ctx=HookContext( config=self.sdk_configuration, base_url=base_url or "", - operation_id="ListBatchJobs", + operation_id="jobs_api_routes_batch_get_batch_jobs", oauth2_scopes=None, security_source=get_security_from_env( self.sdk_configuration.security, models.Security @@ -116,15 +110,15 @@ def list( ) if utils.match_response(http_res, "200", "application/json"): - return unmarshal_json_response(models.BatchJobsOut, http_res) + return unmarshal_json_response(models.ListBatchJobsResponse, http_res) if utils.match_response(http_res, "4XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise models.SDKError("Unexpected response received", http_res) + raise errors.SDKError("Unexpected response received", http_res) async def list_async( self, @@ -136,13 +130,13 @@ async def list_async( metadata: OptionalNullable[Dict[str, Any]] = UNSET, created_after: OptionalNullable[datetime] = UNSET, created_by_me: Optional[bool] = False, - status: OptionalNullable[List[models_batchjobstatus.BatchJobStatus]] = UNSET, - order_by: Optional[models_listbatchjobsop.OrderBy] = "-created", + status: OptionalNullable[List[models.BatchJobStatus]] = UNSET, + order_by: Optional[models.OrderBy] = "-created", retries: OptionalNullable[utils.RetryConfig] = UNSET, server_url: Optional[str] = None, timeout_ms: Optional[int] = None, http_headers: 
Optional[Mapping[str, str]] = None, - ) -> models.BatchJobsOut: + ) -> models.ListBatchJobsResponse: r"""Get Batch Jobs Get a list of batch jobs for your organization and user. @@ -171,7 +165,7 @@ async def list_async( else: base_url = self._get_url(base_url, url_variables) - request = models.ListBatchJobsRequest( + request = models.JobsAPIRoutesBatchGetBatchJobsRequest( page=page, page_size=page_size, model=model, @@ -212,7 +206,7 @@ async def list_async( hook_ctx=HookContext( config=self.sdk_configuration, base_url=base_url or "", - operation_id="ListBatchJobs", + operation_id="jobs_api_routes_batch_get_batch_jobs", oauth2_scopes=None, security_source=get_security_from_env( self.sdk_configuration.security, models.Security @@ -224,26 +218,23 @@ async def list_async( ) if utils.match_response(http_res, "200", "application/json"): - return unmarshal_json_response(models.BatchJobsOut, http_res) + return unmarshal_json_response(models.ListBatchJobsResponse, http_res) if utils.match_response(http_res, "4XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise models.SDKError("Unexpected response received", http_res) + raise errors.SDKError("Unexpected response received", http_res) def create( self, *, - endpoint: models_apiendpoint.APIEndpoint, + endpoint: models.APIEndpoint, input_files: OptionalNullable[List[str]] = UNSET, requests: OptionalNullable[ - Union[ - List[models_batchrequest.BatchRequest], - List[models_batchrequest.BatchRequestTypedDict], - ] + Union[List[models.BatchRequest], List[models.BatchRequestTypedDict]] ] = UNSET, model: OptionalNullable[str] 
= UNSET, agent_id: OptionalNullable[str] = UNSET, @@ -253,7 +244,7 @@ def create( server_url: Optional[str] = None, timeout_ms: Optional[int] = None, http_headers: Optional[Mapping[str, str]] = None, - ) -> models.BatchJobOut: + ) -> models.BatchJob: r"""Create Batch Job Create a new batch job, it will be queued for processing. @@ -280,7 +271,7 @@ def create( else: base_url = self._get_url(base_url, url_variables) - request = models.BatchJobIn( + request = models.CreateBatchJobRequest( input_files=input_files, requests=utils.get_pydantic_model( requests, OptionalNullable[List[models.BatchRequest]] @@ -306,7 +297,7 @@ def create( http_headers=http_headers, security=self.sdk_configuration.security, get_serialized_body=lambda: utils.serialize_request_body( - request, False, False, "json", models.BatchJobIn + request, False, False, "json", models.CreateBatchJobRequest ), allow_empty_value=None, timeout_ms=timeout_ms, @@ -324,7 +315,7 @@ def create( hook_ctx=HookContext( config=self.sdk_configuration, base_url=base_url or "", - operation_id="CreateBatchJob", + operation_id="jobs_api_routes_batch_create_batch_job", oauth2_scopes=None, security_source=get_security_from_env( self.sdk_configuration.security, models.Security @@ -336,26 +327,23 @@ def create( ) if utils.match_response(http_res, "200", "application/json"): - return unmarshal_json_response(models.BatchJobOut, http_res) + return unmarshal_json_response(models.BatchJob, http_res) if utils.match_response(http_res, "4XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise models.SDKError("Unexpected response received", http_res) + 
raise errors.SDKError("Unexpected response received", http_res) async def create_async( self, *, - endpoint: models_apiendpoint.APIEndpoint, + endpoint: models.APIEndpoint, input_files: OptionalNullable[List[str]] = UNSET, requests: OptionalNullable[ - Union[ - List[models_batchrequest.BatchRequest], - List[models_batchrequest.BatchRequestTypedDict], - ] + Union[List[models.BatchRequest], List[models.BatchRequestTypedDict]] ] = UNSET, model: OptionalNullable[str] = UNSET, agent_id: OptionalNullable[str] = UNSET, @@ -365,7 +353,7 @@ async def create_async( server_url: Optional[str] = None, timeout_ms: Optional[int] = None, http_headers: Optional[Mapping[str, str]] = None, - ) -> models.BatchJobOut: + ) -> models.BatchJob: r"""Create Batch Job Create a new batch job, it will be queued for processing. @@ -392,7 +380,7 @@ async def create_async( else: base_url = self._get_url(base_url, url_variables) - request = models.BatchJobIn( + request = models.CreateBatchJobRequest( input_files=input_files, requests=utils.get_pydantic_model( requests, OptionalNullable[List[models.BatchRequest]] @@ -418,7 +406,7 @@ async def create_async( http_headers=http_headers, security=self.sdk_configuration.security, get_serialized_body=lambda: utils.serialize_request_body( - request, False, False, "json", models.BatchJobIn + request, False, False, "json", models.CreateBatchJobRequest ), allow_empty_value=None, timeout_ms=timeout_ms, @@ -436,7 +424,7 @@ async def create_async( hook_ctx=HookContext( config=self.sdk_configuration, base_url=base_url or "", - operation_id="CreateBatchJob", + operation_id="jobs_api_routes_batch_create_batch_job", oauth2_scopes=None, security_source=get_security_from_env( self.sdk_configuration.security, models.Security @@ -448,15 +436,15 @@ async def create_async( ) if utils.match_response(http_res, "200", "application/json"): - return unmarshal_json_response(models.BatchJobOut, http_res) + return unmarshal_json_response(models.BatchJob, http_res) if 
utils.match_response(http_res, "4XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise models.SDKError("Unexpected response received", http_res) + raise errors.SDKError("Unexpected response received", http_res) def get( self, @@ -467,7 +455,7 @@ def get( server_url: Optional[str] = None, timeout_ms: Optional[int] = None, http_headers: Optional[Mapping[str, str]] = None, - ) -> models.BatchJobOut: + ) -> models.BatchJob: r"""Get Batch Job Get a batch job details by its UUID. @@ -492,7 +480,7 @@ def get( else: base_url = self._get_url(base_url, url_variables) - request = models.GetBatchJobRequest( + request = models.JobsAPIRoutesBatchGetBatchJobRequest( job_id=job_id, inline=inline, ) @@ -526,7 +514,7 @@ def get( hook_ctx=HookContext( config=self.sdk_configuration, base_url=base_url or "", - operation_id="GetBatchJob", + operation_id="jobs_api_routes_batch_get_batch_job", oauth2_scopes=None, security_source=get_security_from_env( self.sdk_configuration.security, models.Security @@ -538,15 +526,15 @@ def get( ) if utils.match_response(http_res, "200", "application/json"): - return unmarshal_json_response(models.BatchJobOut, http_res) + return unmarshal_json_response(models.BatchJob, http_res) if utils.match_response(http_res, "4XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", 
http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise models.SDKError("Unexpected response received", http_res) + raise errors.SDKError("Unexpected response received", http_res) async def get_async( self, @@ -557,7 +545,7 @@ async def get_async( server_url: Optional[str] = None, timeout_ms: Optional[int] = None, http_headers: Optional[Mapping[str, str]] = None, - ) -> models.BatchJobOut: + ) -> models.BatchJob: r"""Get Batch Job Get a batch job details by its UUID. @@ -582,7 +570,7 @@ async def get_async( else: base_url = self._get_url(base_url, url_variables) - request = models.GetBatchJobRequest( + request = models.JobsAPIRoutesBatchGetBatchJobRequest( job_id=job_id, inline=inline, ) @@ -616,7 +604,7 @@ async def get_async( hook_ctx=HookContext( config=self.sdk_configuration, base_url=base_url or "", - operation_id="GetBatchJob", + operation_id="jobs_api_routes_batch_get_batch_job", oauth2_scopes=None, security_source=get_security_from_env( self.sdk_configuration.security, models.Security @@ -628,15 +616,15 @@ async def get_async( ) if utils.match_response(http_res, "200", "application/json"): - return unmarshal_json_response(models.BatchJobOut, http_res) + return unmarshal_json_response(models.BatchJob, http_res) if utils.match_response(http_res, "4XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise models.SDKError("Unexpected response received", http_res) + raise errors.SDKError("Unexpected response received", http_res) def cancel( self, @@ -646,7 +634,7 @@ def cancel( server_url: Optional[str] = None, 
timeout_ms: Optional[int] = None, http_headers: Optional[Mapping[str, str]] = None, - ) -> models.BatchJobOut: + ) -> models.BatchJob: r"""Cancel Batch Job Request the cancellation of a batch job. @@ -667,7 +655,7 @@ def cancel( else: base_url = self._get_url(base_url, url_variables) - request = models.CancelBatchJobRequest( + request = models.JobsAPIRoutesBatchCancelBatchJobRequest( job_id=job_id, ) @@ -700,7 +688,7 @@ def cancel( hook_ctx=HookContext( config=self.sdk_configuration, base_url=base_url or "", - operation_id="CancelBatchJob", + operation_id="jobs_api_routes_batch_cancel_batch_job", oauth2_scopes=None, security_source=get_security_from_env( self.sdk_configuration.security, models.Security @@ -712,15 +700,15 @@ def cancel( ) if utils.match_response(http_res, "200", "application/json"): - return unmarshal_json_response(models.BatchJobOut, http_res) + return unmarshal_json_response(models.BatchJob, http_res) if utils.match_response(http_res, "4XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise models.SDKError("Unexpected response received", http_res) + raise errors.SDKError("Unexpected response received", http_res) async def cancel_async( self, @@ -730,7 +718,7 @@ async def cancel_async( server_url: Optional[str] = None, timeout_ms: Optional[int] = None, http_headers: Optional[Mapping[str, str]] = None, - ) -> models.BatchJobOut: + ) -> models.BatchJob: r"""Cancel Batch Job Request the cancellation of a batch job. 
@@ -751,7 +739,7 @@ async def cancel_async( else: base_url = self._get_url(base_url, url_variables) - request = models.CancelBatchJobRequest( + request = models.JobsAPIRoutesBatchCancelBatchJobRequest( job_id=job_id, ) @@ -784,7 +772,7 @@ async def cancel_async( hook_ctx=HookContext( config=self.sdk_configuration, base_url=base_url or "", - operation_id="CancelBatchJob", + operation_id="jobs_api_routes_batch_cancel_batch_job", oauth2_scopes=None, security_source=get_security_from_env( self.sdk_configuration.security, models.Security @@ -796,12 +784,12 @@ async def cancel_async( ) if utils.match_response(http_res, "200", "application/json"): - return unmarshal_json_response(models.BatchJobOut, http_res) + return unmarshal_json_response(models.BatchJob, http_res) if utils.match_response(http_res, "4XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise models.SDKError("Unexpected response received", http_res) + raise errors.SDKError("Unexpected response received", http_res) diff --git a/src/mistralai/client/beta_agents.py b/src/mistralai/client/beta_agents.py index 4e692f17..157c5de4 100644 --- a/src/mistralai/client/beta_agents.py +++ b/src/mistralai/client/beta_agents.py @@ -2,15 +2,8 @@ # @generated-id: b64ad29b7174 from .basesdk import BaseSDK -from mistralai.client import models, utils +from mistralai.client import errors, models, utils from mistralai.client._hooks import HookContext -from mistralai.client.models import ( - agentcreationrequest as models_agentcreationrequest, - agentupdaterequest as models_agentupdaterequest, - completionargs as 
models_completionargs, - getagentop as models_getagentop, - requestsource as models_requestsource, -) from mistralai.client.types import OptionalNullable, UNSET from mistralai.client.utils import get_security_from_env from mistralai.client.utils.unmarshal_json_response import unmarshal_json_response @@ -28,15 +21,12 @@ def create( instructions: OptionalNullable[str] = UNSET, tools: Optional[ Union[ - List[models_agentcreationrequest.AgentCreationRequestTool], - List[models_agentcreationrequest.AgentCreationRequestToolTypedDict], + List[models.CreateAgentRequestTool], + List[models.CreateAgentRequestToolTypedDict], ] ] = None, completion_args: Optional[ - Union[ - models_completionargs.CompletionArgs, - models_completionargs.CompletionArgsTypedDict, - ] + Union[models.CompletionArgs, models.CompletionArgsTypedDict] ] = None, description: OptionalNullable[str] = UNSET, handoffs: OptionalNullable[List[str]] = UNSET, @@ -75,10 +65,10 @@ def create( else: base_url = self._get_url(base_url, url_variables) - request = models.AgentCreationRequest( + request = models.CreateAgentRequest( instructions=instructions, tools=utils.get_pydantic_model( - tools, Optional[List[models.AgentCreationRequestTool]] + tools, Optional[List[models.CreateAgentRequestTool]] ), completion_args=utils.get_pydantic_model( completion_args, Optional[models.CompletionArgs] @@ -105,7 +95,7 @@ def create( http_headers=http_headers, security=self.sdk_configuration.security, get_serialized_body=lambda: utils.serialize_request_body( - request, False, False, "json", models.AgentCreationRequest + request, False, False, "json", models.CreateAgentRequest ), allow_empty_value=None, timeout_ms=timeout_ms, @@ -123,7 +113,7 @@ def create( hook_ctx=HookContext( config=self.sdk_configuration, base_url=base_url or "", - operation_id="CreateAgent", + operation_id="agents_api_v1_agents_create", oauth2_scopes=None, security_source=get_security_from_env( self.sdk_configuration.security, models.Security @@ -139,17 
+129,17 @@ def create( return unmarshal_json_response(models.Agent, http_res) if utils.match_response(http_res, "422", "application/json"): response_data = unmarshal_json_response( - models.HTTPValidationErrorData, http_res + errors.HTTPValidationErrorData, http_res ) - raise models.HTTPValidationError(response_data, http_res) + raise errors.HTTPValidationError(response_data, http_res) if utils.match_response(http_res, "4XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise models.SDKError("Unexpected response received", http_res) + raise errors.SDKError("Unexpected response received", http_res) async def create_async( self, @@ -159,15 +149,12 @@ async def create_async( instructions: OptionalNullable[str] = UNSET, tools: Optional[ Union[ - List[models_agentcreationrequest.AgentCreationRequestTool], - List[models_agentcreationrequest.AgentCreationRequestToolTypedDict], + List[models.CreateAgentRequestTool], + List[models.CreateAgentRequestToolTypedDict], ] ] = None, completion_args: Optional[ - Union[ - models_completionargs.CompletionArgs, - models_completionargs.CompletionArgsTypedDict, - ] + Union[models.CompletionArgs, models.CompletionArgsTypedDict] ] = None, description: OptionalNullable[str] = UNSET, handoffs: OptionalNullable[List[str]] = UNSET, @@ -206,10 +193,10 @@ async def create_async( else: base_url = self._get_url(base_url, url_variables) - request = models.AgentCreationRequest( + request = models.CreateAgentRequest( instructions=instructions, tools=utils.get_pydantic_model( - tools, Optional[List[models.AgentCreationRequestTool]] + tools, 
Optional[List[models.CreateAgentRequestTool]] ), completion_args=utils.get_pydantic_model( completion_args, Optional[models.CompletionArgs] @@ -236,7 +223,7 @@ async def create_async( http_headers=http_headers, security=self.sdk_configuration.security, get_serialized_body=lambda: utils.serialize_request_body( - request, False, False, "json", models.AgentCreationRequest + request, False, False, "json", models.CreateAgentRequest ), allow_empty_value=None, timeout_ms=timeout_ms, @@ -254,7 +241,7 @@ async def create_async( hook_ctx=HookContext( config=self.sdk_configuration, base_url=base_url or "", - operation_id="CreateAgent", + operation_id="agents_api_v1_agents_create", oauth2_scopes=None, security_source=get_security_from_env( self.sdk_configuration.security, models.Security @@ -270,17 +257,17 @@ async def create_async( return unmarshal_json_response(models.Agent, http_res) if utils.match_response(http_res, "422", "application/json"): response_data = unmarshal_json_response( - models.HTTPValidationErrorData, http_res + errors.HTTPValidationErrorData, http_res ) - raise models.HTTPValidationError(response_data, http_res) + raise errors.HTTPValidationError(response_data, http_res) if utils.match_response(http_res, "4XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise models.SDKError("Unexpected response received", http_res) + raise errors.SDKError("Unexpected response received", http_res) def list( self, @@ -288,7 +275,7 @@ def list( page: Optional[int] = 0, page_size: Optional[int] = 20, deployment_chat: OptionalNullable[bool] = UNSET, - sources: 
OptionalNullable[List[models_requestsource.RequestSource]] = UNSET, + sources: OptionalNullable[List[models.RequestSource]] = UNSET, name: OptionalNullable[str] = UNSET, search: OptionalNullable[str] = UNSET, id: OptionalNullable[str] = UNSET, @@ -325,7 +312,7 @@ def list( else: base_url = self._get_url(base_url, url_variables) - request = models.ListAgentsRequest( + request = models.AgentsAPIV1AgentsListRequest( page=page, page_size=page_size, deployment_chat=deployment_chat, @@ -365,7 +352,7 @@ def list( hook_ctx=HookContext( config=self.sdk_configuration, base_url=base_url or "", - operation_id="ListAgents", + operation_id="agents_api_v1_agents_list", oauth2_scopes=None, security_source=get_security_from_env( self.sdk_configuration.security, models.Security @@ -381,17 +368,17 @@ def list( return unmarshal_json_response(List[models.Agent], http_res) if utils.match_response(http_res, "422", "application/json"): response_data = unmarshal_json_response( - models.HTTPValidationErrorData, http_res + errors.HTTPValidationErrorData, http_res ) - raise models.HTTPValidationError(response_data, http_res) + raise errors.HTTPValidationError(response_data, http_res) if utils.match_response(http_res, "4XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise models.SDKError("Unexpected response received", http_res) + raise errors.SDKError("Unexpected response received", http_res) async def list_async( self, @@ -399,7 +386,7 @@ async def list_async( page: Optional[int] = 0, page_size: Optional[int] = 20, deployment_chat: OptionalNullable[bool] = UNSET, - sources: 
OptionalNullable[List[models_requestsource.RequestSource]] = UNSET, + sources: OptionalNullable[List[models.RequestSource]] = UNSET, name: OptionalNullable[str] = UNSET, search: OptionalNullable[str] = UNSET, id: OptionalNullable[str] = UNSET, @@ -436,7 +423,7 @@ async def list_async( else: base_url = self._get_url(base_url, url_variables) - request = models.ListAgentsRequest( + request = models.AgentsAPIV1AgentsListRequest( page=page, page_size=page_size, deployment_chat=deployment_chat, @@ -476,7 +463,7 @@ async def list_async( hook_ctx=HookContext( config=self.sdk_configuration, base_url=base_url or "", - operation_id="ListAgents", + operation_id="agents_api_v1_agents_list", oauth2_scopes=None, security_source=get_security_from_env( self.sdk_configuration.security, models.Security @@ -492,17 +479,17 @@ async def list_async( return unmarshal_json_response(List[models.Agent], http_res) if utils.match_response(http_res, "422", "application/json"): response_data = unmarshal_json_response( - models.HTTPValidationErrorData, http_res + errors.HTTPValidationErrorData, http_res ) - raise models.HTTPValidationError(response_data, http_res) + raise errors.HTTPValidationError(response_data, http_res) if utils.match_response(http_res, "4XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise models.SDKError("Unexpected response received", http_res) + raise errors.SDKError("Unexpected response received", http_res) def get( self, @@ -510,8 +497,8 @@ def get( agent_id: str, agent_version: OptionalNullable[ Union[ - models_getagentop.GetAgentAgentVersion, - 
models_getagentop.GetAgentAgentVersionTypedDict, + models.AgentsAPIV1AgentsGetAgentVersion, + models.AgentsAPIV1AgentsGetAgentVersionTypedDict, ] ] = UNSET, retries: OptionalNullable[utils.RetryConfig] = UNSET, @@ -540,7 +527,7 @@ def get( else: base_url = self._get_url(base_url, url_variables) - request = models.GetAgentRequest( + request = models.AgentsAPIV1AgentsGetRequest( agent_id=agent_id, agent_version=agent_version, ) @@ -574,7 +561,7 @@ def get( hook_ctx=HookContext( config=self.sdk_configuration, base_url=base_url or "", - operation_id="GetAgent", + operation_id="agents_api_v1_agents_get", oauth2_scopes=None, security_source=get_security_from_env( self.sdk_configuration.security, models.Security @@ -590,17 +577,17 @@ def get( return unmarshal_json_response(models.Agent, http_res) if utils.match_response(http_res, "422", "application/json"): response_data = unmarshal_json_response( - models.HTTPValidationErrorData, http_res + errors.HTTPValidationErrorData, http_res ) - raise models.HTTPValidationError(response_data, http_res) + raise errors.HTTPValidationError(response_data, http_res) if utils.match_response(http_res, "4XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise models.SDKError("Unexpected response received", http_res) + raise errors.SDKError("Unexpected response received", http_res) async def get_async( self, @@ -608,8 +595,8 @@ async def get_async( agent_id: str, agent_version: OptionalNullable[ Union[ - models_getagentop.GetAgentAgentVersion, - models_getagentop.GetAgentAgentVersionTypedDict, + models.AgentsAPIV1AgentsGetAgentVersion, + 
models.AgentsAPIV1AgentsGetAgentVersionTypedDict, ] ] = UNSET, retries: OptionalNullable[utils.RetryConfig] = UNSET, @@ -638,7 +625,7 @@ async def get_async( else: base_url = self._get_url(base_url, url_variables) - request = models.GetAgentRequest( + request = models.AgentsAPIV1AgentsGetRequest( agent_id=agent_id, agent_version=agent_version, ) @@ -672,7 +659,7 @@ async def get_async( hook_ctx=HookContext( config=self.sdk_configuration, base_url=base_url or "", - operation_id="GetAgent", + operation_id="agents_api_v1_agents_get", oauth2_scopes=None, security_source=get_security_from_env( self.sdk_configuration.security, models.Security @@ -688,17 +675,17 @@ async def get_async( return unmarshal_json_response(models.Agent, http_res) if utils.match_response(http_res, "422", "application/json"): response_data = unmarshal_json_response( - models.HTTPValidationErrorData, http_res + errors.HTTPValidationErrorData, http_res ) - raise models.HTTPValidationError(response_data, http_res) + raise errors.HTTPValidationError(response_data, http_res) if utils.match_response(http_res, "4XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise models.SDKError("Unexpected response received", http_res) + raise errors.SDKError("Unexpected response received", http_res) def update( self, @@ -707,15 +694,12 @@ def update( instructions: OptionalNullable[str] = UNSET, tools: Optional[ Union[ - List[models_agentupdaterequest.AgentUpdateRequestTool], - List[models_agentupdaterequest.AgentUpdateRequestToolTypedDict], + List[models.UpdateAgentRequestTool], + 
List[models.UpdateAgentRequestToolTypedDict], ] ] = None, completion_args: Optional[ - Union[ - models_completionargs.CompletionArgs, - models_completionargs.CompletionArgsTypedDict, - ] + Union[models.CompletionArgs, models.CompletionArgsTypedDict] ] = None, model: OptionalNullable[str] = UNSET, name: OptionalNullable[str] = UNSET, @@ -759,12 +743,12 @@ def update( else: base_url = self._get_url(base_url, url_variables) - request = models.UpdateAgentRequest( + request = models.AgentsAPIV1AgentsUpdateRequest( agent_id=agent_id, - agent_update_request=models.AgentUpdateRequest( + update_agent_request=models.UpdateAgentRequest( instructions=instructions, tools=utils.get_pydantic_model( - tools, Optional[List[models.AgentUpdateRequestTool]] + tools, Optional[List[models.UpdateAgentRequestTool]] ), completion_args=utils.get_pydantic_model( completion_args, Optional[models.CompletionArgs] @@ -793,11 +777,11 @@ def update( http_headers=http_headers, security=self.sdk_configuration.security, get_serialized_body=lambda: utils.serialize_request_body( - request.agent_update_request, + request.update_agent_request, False, False, "json", - models.AgentUpdateRequest, + models.UpdateAgentRequest, ), allow_empty_value=None, timeout_ms=timeout_ms, @@ -815,7 +799,7 @@ def update( hook_ctx=HookContext( config=self.sdk_configuration, base_url=base_url or "", - operation_id="UpdateAgent", + operation_id="agents_api_v1_agents_update", oauth2_scopes=None, security_source=get_security_from_env( self.sdk_configuration.security, models.Security @@ -831,17 +815,17 @@ def update( return unmarshal_json_response(models.Agent, http_res) if utils.match_response(http_res, "422", "application/json"): response_data = unmarshal_json_response( - models.HTTPValidationErrorData, http_res + errors.HTTPValidationErrorData, http_res ) - raise models.HTTPValidationError(response_data, http_res) + raise errors.HTTPValidationError(response_data, http_res) if utils.match_response(http_res, "4XX", "*"): 
http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise models.SDKError("Unexpected response received", http_res) + raise errors.SDKError("Unexpected response received", http_res) async def update_async( self, @@ -850,15 +834,12 @@ async def update_async( instructions: OptionalNullable[str] = UNSET, tools: Optional[ Union[ - List[models_agentupdaterequest.AgentUpdateRequestTool], - List[models_agentupdaterequest.AgentUpdateRequestToolTypedDict], + List[models.UpdateAgentRequestTool], + List[models.UpdateAgentRequestToolTypedDict], ] ] = None, completion_args: Optional[ - Union[ - models_completionargs.CompletionArgs, - models_completionargs.CompletionArgsTypedDict, - ] + Union[models.CompletionArgs, models.CompletionArgsTypedDict] ] = None, model: OptionalNullable[str] = UNSET, name: OptionalNullable[str] = UNSET, @@ -902,12 +883,12 @@ async def update_async( else: base_url = self._get_url(base_url, url_variables) - request = models.UpdateAgentRequest( + request = models.AgentsAPIV1AgentsUpdateRequest( agent_id=agent_id, - agent_update_request=models.AgentUpdateRequest( + update_agent_request=models.UpdateAgentRequest( instructions=instructions, tools=utils.get_pydantic_model( - tools, Optional[List[models.AgentUpdateRequestTool]] + tools, Optional[List[models.UpdateAgentRequestTool]] ), completion_args=utils.get_pydantic_model( completion_args, Optional[models.CompletionArgs] @@ -936,11 +917,11 @@ async def update_async( http_headers=http_headers, security=self.sdk_configuration.security, get_serialized_body=lambda: utils.serialize_request_body( - request.agent_update_request, + 
request.update_agent_request, False, False, "json", - models.AgentUpdateRequest, + models.UpdateAgentRequest, ), allow_empty_value=None, timeout_ms=timeout_ms, @@ -958,7 +939,7 @@ async def update_async( hook_ctx=HookContext( config=self.sdk_configuration, base_url=base_url or "", - operation_id="UpdateAgent", + operation_id="agents_api_v1_agents_update", oauth2_scopes=None, security_source=get_security_from_env( self.sdk_configuration.security, models.Security @@ -974,17 +955,17 @@ async def update_async( return unmarshal_json_response(models.Agent, http_res) if utils.match_response(http_res, "422", "application/json"): response_data = unmarshal_json_response( - models.HTTPValidationErrorData, http_res + errors.HTTPValidationErrorData, http_res ) - raise models.HTTPValidationError(response_data, http_res) + raise errors.HTTPValidationError(response_data, http_res) if utils.match_response(http_res, "4XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise models.SDKError("Unexpected response received", http_res) + raise errors.SDKError("Unexpected response received", http_res) def delete( self, @@ -1013,7 +994,7 @@ def delete( else: base_url = self._get_url(base_url, url_variables) - request = models.DeleteAgentRequest( + request = models.AgentsAPIV1AgentsDeleteRequest( agent_id=agent_id, ) @@ -1046,7 +1027,7 @@ def delete( hook_ctx=HookContext( config=self.sdk_configuration, base_url=base_url or "", - operation_id="DeleteAgent", + operation_id="agents_api_v1_agents_delete", oauth2_scopes=None, security_source=get_security_from_env( 
self.sdk_configuration.security, models.Security @@ -1062,17 +1043,17 @@ def delete( return if utils.match_response(http_res, "422", "application/json"): response_data = unmarshal_json_response( - models.HTTPValidationErrorData, http_res + errors.HTTPValidationErrorData, http_res ) - raise models.HTTPValidationError(response_data, http_res) + raise errors.HTTPValidationError(response_data, http_res) if utils.match_response(http_res, "4XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise models.SDKError("Unexpected response received", http_res) + raise errors.SDKError("Unexpected response received", http_res) async def delete_async( self, @@ -1101,7 +1082,7 @@ async def delete_async( else: base_url = self._get_url(base_url, url_variables) - request = models.DeleteAgentRequest( + request = models.AgentsAPIV1AgentsDeleteRequest( agent_id=agent_id, ) @@ -1134,7 +1115,7 @@ async def delete_async( hook_ctx=HookContext( config=self.sdk_configuration, base_url=base_url or "", - operation_id="DeleteAgent", + operation_id="agents_api_v1_agents_delete", oauth2_scopes=None, security_source=get_security_from_env( self.sdk_configuration.security, models.Security @@ -1150,17 +1131,17 @@ async def delete_async( return if utils.match_response(http_res, "422", "application/json"): response_data = unmarshal_json_response( - models.HTTPValidationErrorData, http_res + errors.HTTPValidationErrorData, http_res ) - raise models.HTTPValidationError(response_data, http_res) + raise errors.HTTPValidationError(response_data, http_res) if utils.match_response(http_res, "4XX", "*"): http_res_text = await 
utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise models.SDKError("Unexpected response received", http_res) + raise errors.SDKError("Unexpected response received", http_res) def update_version( self, @@ -1193,7 +1174,7 @@ def update_version( else: base_url = self._get_url(base_url, url_variables) - request = models.UpdateAgentVersionRequest( + request = models.AgentsAPIV1AgentsUpdateVersionRequest( agent_id=agent_id, version=version, ) @@ -1227,7 +1208,7 @@ def update_version( hook_ctx=HookContext( config=self.sdk_configuration, base_url=base_url or "", - operation_id="UpdateAgentVersion", + operation_id="agents_api_v1_agents_update_version", oauth2_scopes=None, security_source=get_security_from_env( self.sdk_configuration.security, models.Security @@ -1243,17 +1224,17 @@ def update_version( return unmarshal_json_response(models.Agent, http_res) if utils.match_response(http_res, "422", "application/json"): response_data = unmarshal_json_response( - models.HTTPValidationErrorData, http_res + errors.HTTPValidationErrorData, http_res ) - raise models.HTTPValidationError(response_data, http_res) + raise errors.HTTPValidationError(response_data, http_res) if utils.match_response(http_res, "4XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error 
occurred", http_res, http_res_text) - raise models.SDKError("Unexpected response received", http_res) + raise errors.SDKError("Unexpected response received", http_res) async def update_version_async( self, @@ -1286,7 +1267,7 @@ async def update_version_async( else: base_url = self._get_url(base_url, url_variables) - request = models.UpdateAgentVersionRequest( + request = models.AgentsAPIV1AgentsUpdateVersionRequest( agent_id=agent_id, version=version, ) @@ -1320,7 +1301,7 @@ async def update_version_async( hook_ctx=HookContext( config=self.sdk_configuration, base_url=base_url or "", - operation_id="UpdateAgentVersion", + operation_id="agents_api_v1_agents_update_version", oauth2_scopes=None, security_source=get_security_from_env( self.sdk_configuration.security, models.Security @@ -1336,17 +1317,17 @@ async def update_version_async( return unmarshal_json_response(models.Agent, http_res) if utils.match_response(http_res, "422", "application/json"): response_data = unmarshal_json_response( - models.HTTPValidationErrorData, http_res + errors.HTTPValidationErrorData, http_res ) - raise models.HTTPValidationError(response_data, http_res) + raise errors.HTTPValidationError(response_data, http_res) if utils.match_response(http_res, "4XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise models.SDKError("Unexpected response received", http_res) + raise errors.SDKError("Unexpected response received", http_res) def list_versions( self, @@ -1381,7 +1362,7 @@ def list_versions( else: base_url = self._get_url(base_url, url_variables) - request = 
models.ListAgentVersionsRequest( + request = models.AgentsAPIV1AgentsListVersionsRequest( agent_id=agent_id, page=page, page_size=page_size, @@ -1416,7 +1397,7 @@ def list_versions( hook_ctx=HookContext( config=self.sdk_configuration, base_url=base_url or "", - operation_id="ListAgentVersions", + operation_id="agents_api_v1_agents_list_versions", oauth2_scopes=None, security_source=get_security_from_env( self.sdk_configuration.security, models.Security @@ -1432,17 +1413,17 @@ def list_versions( return unmarshal_json_response(List[models.Agent], http_res) if utils.match_response(http_res, "422", "application/json"): response_data = unmarshal_json_response( - models.HTTPValidationErrorData, http_res + errors.HTTPValidationErrorData, http_res ) - raise models.HTTPValidationError(response_data, http_res) + raise errors.HTTPValidationError(response_data, http_res) if utils.match_response(http_res, "4XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise models.SDKError("Unexpected response received", http_res) + raise errors.SDKError("Unexpected response received", http_res) async def list_versions_async( self, @@ -1477,7 +1458,7 @@ async def list_versions_async( else: base_url = self._get_url(base_url, url_variables) - request = models.ListAgentVersionsRequest( + request = models.AgentsAPIV1AgentsListVersionsRequest( agent_id=agent_id, page=page, page_size=page_size, @@ -1512,7 +1493,7 @@ async def list_versions_async( hook_ctx=HookContext( config=self.sdk_configuration, base_url=base_url or "", - operation_id="ListAgentVersions", + operation_id="agents_api_v1_agents_list_versions", 
oauth2_scopes=None, security_source=get_security_from_env( self.sdk_configuration.security, models.Security @@ -1528,17 +1509,17 @@ async def list_versions_async( return unmarshal_json_response(List[models.Agent], http_res) if utils.match_response(http_res, "422", "application/json"): response_data = unmarshal_json_response( - models.HTTPValidationErrorData, http_res + errors.HTTPValidationErrorData, http_res ) - raise models.HTTPValidationError(response_data, http_res) + raise errors.HTTPValidationError(response_data, http_res) if utils.match_response(http_res, "4XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise models.SDKError("Unexpected response received", http_res) + raise errors.SDKError("Unexpected response received", http_res) def get_version( self, @@ -1571,7 +1552,7 @@ def get_version( else: base_url = self._get_url(base_url, url_variables) - request = models.GetAgentVersionRequest( + request = models.AgentsAPIV1AgentsGetVersionRequest( agent_id=agent_id, version=version, ) @@ -1605,7 +1586,7 @@ def get_version( hook_ctx=HookContext( config=self.sdk_configuration, base_url=base_url or "", - operation_id="GetAgentVersion", + operation_id="agents_api_v1_agents_get_version", oauth2_scopes=None, security_source=get_security_from_env( self.sdk_configuration.security, models.Security @@ -1621,17 +1602,17 @@ def get_version( return unmarshal_json_response(models.Agent, http_res) if utils.match_response(http_res, "422", "application/json"): response_data = unmarshal_json_response( - models.HTTPValidationErrorData, http_res + 
errors.HTTPValidationErrorData, http_res ) - raise models.HTTPValidationError(response_data, http_res) + raise errors.HTTPValidationError(response_data, http_res) if utils.match_response(http_res, "4XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise models.SDKError("Unexpected response received", http_res) + raise errors.SDKError("Unexpected response received", http_res) async def get_version_async( self, @@ -1664,7 +1645,7 @@ async def get_version_async( else: base_url = self._get_url(base_url, url_variables) - request = models.GetAgentVersionRequest( + request = models.AgentsAPIV1AgentsGetVersionRequest( agent_id=agent_id, version=version, ) @@ -1698,7 +1679,7 @@ async def get_version_async( hook_ctx=HookContext( config=self.sdk_configuration, base_url=base_url or "", - operation_id="GetAgentVersion", + operation_id="agents_api_v1_agents_get_version", oauth2_scopes=None, security_source=get_security_from_env( self.sdk_configuration.security, models.Security @@ -1714,17 +1695,17 @@ async def get_version_async( return unmarshal_json_response(models.Agent, http_res) if utils.match_response(http_res, "422", "application/json"): response_data = unmarshal_json_response( - models.HTTPValidationErrorData, http_res + errors.HTTPValidationErrorData, http_res ) - raise models.HTTPValidationError(response_data, http_res) + raise errors.HTTPValidationError(response_data, http_res) if utils.match_response(http_res, "4XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error 
occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise models.SDKError("Unexpected response received", http_res) + raise errors.SDKError("Unexpected response received", http_res) def create_version_alias( self, @@ -1759,7 +1740,7 @@ def create_version_alias( else: base_url = self._get_url(base_url, url_variables) - request = models.CreateOrUpdateAgentAliasRequest( + request = models.AgentsAPIV1AgentsCreateOrUpdateAliasRequest( agent_id=agent_id, alias=alias, version=version, @@ -1794,7 +1775,7 @@ def create_version_alias( hook_ctx=HookContext( config=self.sdk_configuration, base_url=base_url or "", - operation_id="CreateOrUpdateAgentAlias", + operation_id="agents_api_v1_agents_create_or_update_alias", oauth2_scopes=None, security_source=get_security_from_env( self.sdk_configuration.security, models.Security @@ -1810,17 +1791,17 @@ def create_version_alias( return unmarshal_json_response(models.AgentAliasResponse, http_res) if utils.match_response(http_res, "422", "application/json"): response_data = unmarshal_json_response( - models.HTTPValidationErrorData, http_res + errors.HTTPValidationErrorData, http_res ) - raise models.HTTPValidationError(response_data, http_res) + raise errors.HTTPValidationError(response_data, http_res) if utils.match_response(http_res, "4XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise models.SDKError("Unexpected 
response received", http_res) + raise errors.SDKError("Unexpected response received", http_res) async def create_version_alias_async( self, @@ -1855,7 +1836,7 @@ async def create_version_alias_async( else: base_url = self._get_url(base_url, url_variables) - request = models.CreateOrUpdateAgentAliasRequest( + request = models.AgentsAPIV1AgentsCreateOrUpdateAliasRequest( agent_id=agent_id, alias=alias, version=version, @@ -1890,7 +1871,7 @@ async def create_version_alias_async( hook_ctx=HookContext( config=self.sdk_configuration, base_url=base_url or "", - operation_id="CreateOrUpdateAgentAlias", + operation_id="agents_api_v1_agents_create_or_update_alias", oauth2_scopes=None, security_source=get_security_from_env( self.sdk_configuration.security, models.Security @@ -1906,17 +1887,17 @@ async def create_version_alias_async( return unmarshal_json_response(models.AgentAliasResponse, http_res) if utils.match_response(http_res, "422", "application/json"): response_data = unmarshal_json_response( - models.HTTPValidationErrorData, http_res + errors.HTTPValidationErrorData, http_res ) - raise models.HTTPValidationError(response_data, http_res) + raise errors.HTTPValidationError(response_data, http_res) if utils.match_response(http_res, "4XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise models.SDKError("Unexpected response received", http_res) + raise errors.SDKError("Unexpected response received", http_res) def list_version_aliases( self, @@ -1947,7 +1928,7 @@ def list_version_aliases( else: base_url = self._get_url(base_url, url_variables) - request = 
models.ListAgentAliasesRequest( + request = models.AgentsAPIV1AgentsListVersionAliasesRequest( agent_id=agent_id, ) @@ -1980,7 +1961,7 @@ def list_version_aliases( hook_ctx=HookContext( config=self.sdk_configuration, base_url=base_url or "", - operation_id="ListAgentAliases", + operation_id="agents_api_v1_agents_list_version_aliases", oauth2_scopes=None, security_source=get_security_from_env( self.sdk_configuration.security, models.Security @@ -1996,17 +1977,17 @@ def list_version_aliases( return unmarshal_json_response(List[models.AgentAliasResponse], http_res) if utils.match_response(http_res, "422", "application/json"): response_data = unmarshal_json_response( - models.HTTPValidationErrorData, http_res + errors.HTTPValidationErrorData, http_res ) - raise models.HTTPValidationError(response_data, http_res) + raise errors.HTTPValidationError(response_data, http_res) if utils.match_response(http_res, "4XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise models.SDKError("Unexpected response received", http_res) + raise errors.SDKError("Unexpected response received", http_res) async def list_version_aliases_async( self, @@ -2037,7 +2018,7 @@ async def list_version_aliases_async( else: base_url = self._get_url(base_url, url_variables) - request = models.ListAgentAliasesRequest( + request = models.AgentsAPIV1AgentsListVersionAliasesRequest( agent_id=agent_id, ) @@ -2070,7 +2051,7 @@ async def list_version_aliases_async( hook_ctx=HookContext( config=self.sdk_configuration, base_url=base_url or "", - operation_id="ListAgentAliases", + 
operation_id="agents_api_v1_agents_list_version_aliases", oauth2_scopes=None, security_source=get_security_from_env( self.sdk_configuration.security, models.Security @@ -2086,17 +2067,17 @@ async def list_version_aliases_async( return unmarshal_json_response(List[models.AgentAliasResponse], http_res) if utils.match_response(http_res, "422", "application/json"): response_data = unmarshal_json_response( - models.HTTPValidationErrorData, http_res + errors.HTTPValidationErrorData, http_res ) - raise models.HTTPValidationError(response_data, http_res) + raise errors.HTTPValidationError(response_data, http_res) if utils.match_response(http_res, "4XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise models.SDKError("Unexpected response received", http_res) + raise errors.SDKError("Unexpected response received", http_res) def delete_version_alias( self, @@ -2129,7 +2110,7 @@ def delete_version_alias( else: base_url = self._get_url(base_url, url_variables) - request = models.DeleteAgentAliasRequest( + request = models.AgentsAPIV1AgentsDeleteAliasRequest( agent_id=agent_id, alias=alias, ) @@ -2163,7 +2144,7 @@ def delete_version_alias( hook_ctx=HookContext( config=self.sdk_configuration, base_url=base_url or "", - operation_id="DeleteAgentAlias", + operation_id="agents_api_v1_agents_delete_alias", oauth2_scopes=None, security_source=get_security_from_env( self.sdk_configuration.security, models.Security @@ -2179,17 +2160,17 @@ def delete_version_alias( return if utils.match_response(http_res, "422", "application/json"): response_data = unmarshal_json_response( - 
models.HTTPValidationErrorData, http_res + errors.HTTPValidationErrorData, http_res ) - raise models.HTTPValidationError(response_data, http_res) + raise errors.HTTPValidationError(response_data, http_res) if utils.match_response(http_res, "4XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise models.SDKError("Unexpected response received", http_res) + raise errors.SDKError("Unexpected response received", http_res) async def delete_version_alias_async( self, @@ -2222,7 +2203,7 @@ async def delete_version_alias_async( else: base_url = self._get_url(base_url, url_variables) - request = models.DeleteAgentAliasRequest( + request = models.AgentsAPIV1AgentsDeleteAliasRequest( agent_id=agent_id, alias=alias, ) @@ -2256,7 +2237,7 @@ async def delete_version_alias_async( hook_ctx=HookContext( config=self.sdk_configuration, base_url=base_url or "", - operation_id="DeleteAgentAlias", + operation_id="agents_api_v1_agents_delete_alias", oauth2_scopes=None, security_source=get_security_from_env( self.sdk_configuration.security, models.Security @@ -2272,14 +2253,14 @@ async def delete_version_alias_async( return if utils.match_response(http_res, "422", "application/json"): response_data = unmarshal_json_response( - models.HTTPValidationErrorData, http_res + errors.HTTPValidationErrorData, http_res ) - raise models.HTTPValidationError(response_data, http_res) + raise errors.HTTPValidationError(response_data, http_res) if utils.match_response(http_res, "4XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + 
raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise models.SDKError("Unexpected response received", http_res) + raise errors.SDKError("Unexpected response received", http_res) diff --git a/src/mistralai/client/chat.py b/src/mistralai/client/chat.py index 35698d32..13b9c01f 100644 --- a/src/mistralai/client/chat.py +++ b/src/mistralai/client/chat.py @@ -2,16 +2,8 @@ # @generated-id: 7eba0f088d47 from .basesdk import BaseSDK -from mistralai.client import models, utils +from mistralai.client import errors, models, utils from mistralai.client._hooks import HookContext -from mistralai.client.models import ( - chatcompletionrequest as models_chatcompletionrequest, - chatcompletionstreamrequest as models_chatcompletionstreamrequest, - mistralpromptmode as models_mistralpromptmode, - prediction as models_prediction, - responseformat as models_responseformat, - tool as models_tool, -) from mistralai.client.types import OptionalNullable, UNSET from mistralai.client.utils import eventstreaming, get_security_from_env from mistralai.client.utils.unmarshal_json_response import unmarshal_json_response @@ -111,8 +103,8 @@ def complete( *, model: str, messages: Union[ - List[models_chatcompletionrequest.ChatCompletionRequestMessage], - List[models_chatcompletionrequest.ChatCompletionRequestMessageTypedDict], + List[models.ChatCompletionRequestMessage], + List[models.ChatCompletionRequestMessageTypedDict], ], temperature: OptionalNullable[float] = UNSET, top_p: Optional[float] = None, @@ -120,37 +112,32 @@ def complete( stream: Optional[bool] = False, stop: Optional[ Union[ - models_chatcompletionrequest.ChatCompletionRequestStop, - models_chatcompletionrequest.ChatCompletionRequestStopTypedDict, + 
models.ChatCompletionRequestStop, + models.ChatCompletionRequestStopTypedDict, ] ] = None, random_seed: OptionalNullable[int] = UNSET, metadata: OptionalNullable[Dict[str, Any]] = UNSET, response_format: Optional[ - Union[ - models_responseformat.ResponseFormat, - models_responseformat.ResponseFormatTypedDict, - ] + Union[models.ResponseFormat, models.ResponseFormatTypedDict] ] = None, tools: OptionalNullable[ - Union[List[models_tool.Tool], List[models_tool.ToolTypedDict]] + Union[List[models.Tool], List[models.ToolTypedDict]] ] = UNSET, tool_choice: Optional[ Union[ - models_chatcompletionrequest.ChatCompletionRequestToolChoice, - models_chatcompletionrequest.ChatCompletionRequestToolChoiceTypedDict, + models.ChatCompletionRequestToolChoice, + models.ChatCompletionRequestToolChoiceTypedDict, ] ] = None, presence_penalty: Optional[float] = None, frequency_penalty: Optional[float] = None, n: OptionalNullable[int] = UNSET, prediction: Optional[ - Union[models_prediction.Prediction, models_prediction.PredictionTypedDict] + Union[models.Prediction, models.PredictionTypedDict] ] = None, parallel_tool_calls: Optional[bool] = None, - prompt_mode: OptionalNullable[ - models_mistralpromptmode.MistralPromptMode - ] = UNSET, + prompt_mode: OptionalNullable[models.MistralPromptMode] = UNSET, safe_prompt: Optional[bool] = None, retries: OptionalNullable[utils.RetryConfig] = UNSET, server_url: Optional[str] = None, @@ -271,25 +258,25 @@ def complete( return unmarshal_json_response(models.ChatCompletionResponse, http_res) if utils.match_response(http_res, "422", "application/json"): response_data = unmarshal_json_response( - models.HTTPValidationErrorData, http_res + errors.HTTPValidationErrorData, http_res ) - raise models.HTTPValidationError(response_data, http_res) + raise errors.HTTPValidationError(response_data, http_res) if utils.match_response(http_res, "4XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, 
http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise models.SDKError("Unexpected response received", http_res) + raise errors.SDKError("Unexpected response received", http_res) async def complete_async( self, *, model: str, messages: Union[ - List[models_chatcompletionrequest.ChatCompletionRequestMessage], - List[models_chatcompletionrequest.ChatCompletionRequestMessageTypedDict], + List[models.ChatCompletionRequestMessage], + List[models.ChatCompletionRequestMessageTypedDict], ], temperature: OptionalNullable[float] = UNSET, top_p: Optional[float] = None, @@ -297,37 +284,32 @@ async def complete_async( stream: Optional[bool] = False, stop: Optional[ Union[ - models_chatcompletionrequest.ChatCompletionRequestStop, - models_chatcompletionrequest.ChatCompletionRequestStopTypedDict, + models.ChatCompletionRequestStop, + models.ChatCompletionRequestStopTypedDict, ] ] = None, random_seed: OptionalNullable[int] = UNSET, metadata: OptionalNullable[Dict[str, Any]] = UNSET, response_format: Optional[ - Union[ - models_responseformat.ResponseFormat, - models_responseformat.ResponseFormatTypedDict, - ] + Union[models.ResponseFormat, models.ResponseFormatTypedDict] ] = None, tools: OptionalNullable[ - Union[List[models_tool.Tool], List[models_tool.ToolTypedDict]] + Union[List[models.Tool], List[models.ToolTypedDict]] ] = UNSET, tool_choice: Optional[ Union[ - models_chatcompletionrequest.ChatCompletionRequestToolChoice, - models_chatcompletionrequest.ChatCompletionRequestToolChoiceTypedDict, + models.ChatCompletionRequestToolChoice, + models.ChatCompletionRequestToolChoiceTypedDict, ] ] = None, presence_penalty: Optional[float] = None, frequency_penalty: Optional[float] = None, n: OptionalNullable[int] = 
UNSET, prediction: Optional[ - Union[models_prediction.Prediction, models_prediction.PredictionTypedDict] + Union[models.Prediction, models.PredictionTypedDict] ] = None, parallel_tool_calls: Optional[bool] = None, - prompt_mode: OptionalNullable[ - models_mistralpromptmode.MistralPromptMode - ] = UNSET, + prompt_mode: OptionalNullable[models.MistralPromptMode] = UNSET, safe_prompt: Optional[bool] = None, retries: OptionalNullable[utils.RetryConfig] = UNSET, server_url: Optional[str] = None, @@ -448,27 +430,25 @@ async def complete_async( return unmarshal_json_response(models.ChatCompletionResponse, http_res) if utils.match_response(http_res, "422", "application/json"): response_data = unmarshal_json_response( - models.HTTPValidationErrorData, http_res + errors.HTTPValidationErrorData, http_res ) - raise models.HTTPValidationError(response_data, http_res) + raise errors.HTTPValidationError(response_data, http_res) if utils.match_response(http_res, "4XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise models.SDKError("Unexpected response received", http_res) + raise errors.SDKError("Unexpected response received", http_res) def stream( self, *, model: str, messages: Union[ - List[models_chatcompletionstreamrequest.ChatCompletionStreamRequestMessage], - List[ - models_chatcompletionstreamrequest.ChatCompletionStreamRequestMessageTypedDict - ], + List[models.ChatCompletionStreamRequestMessage], + List[models.ChatCompletionStreamRequestMessageTypedDict], ], temperature: OptionalNullable[float] = UNSET, top_p: Optional[float] = None, @@ -476,37 +456,32 @@ def 
stream( stream: Optional[bool] = True, stop: Optional[ Union[ - models_chatcompletionstreamrequest.ChatCompletionStreamRequestStop, - models_chatcompletionstreamrequest.ChatCompletionStreamRequestStopTypedDict, + models.ChatCompletionStreamRequestStop, + models.ChatCompletionStreamRequestStopTypedDict, ] ] = None, random_seed: OptionalNullable[int] = UNSET, metadata: OptionalNullable[Dict[str, Any]] = UNSET, response_format: Optional[ - Union[ - models_responseformat.ResponseFormat, - models_responseformat.ResponseFormatTypedDict, - ] + Union[models.ResponseFormat, models.ResponseFormatTypedDict] ] = None, tools: OptionalNullable[ - Union[List[models_tool.Tool], List[models_tool.ToolTypedDict]] + Union[List[models.Tool], List[models.ToolTypedDict]] ] = UNSET, tool_choice: Optional[ Union[ - models_chatcompletionstreamrequest.ChatCompletionStreamRequestToolChoice, - models_chatcompletionstreamrequest.ChatCompletionStreamRequestToolChoiceTypedDict, + models.ChatCompletionStreamRequestToolChoice, + models.ChatCompletionStreamRequestToolChoiceTypedDict, ] ] = None, presence_penalty: Optional[float] = None, frequency_penalty: Optional[float] = None, n: OptionalNullable[int] = UNSET, prediction: Optional[ - Union[models_prediction.Prediction, models_prediction.PredictionTypedDict] + Union[models.Prediction, models.PredictionTypedDict] ] = None, parallel_tool_calls: Optional[bool] = None, - prompt_mode: OptionalNullable[ - models_mistralpromptmode.MistralPromptMode - ] = UNSET, + prompt_mode: OptionalNullable[models.MistralPromptMode] = UNSET, safe_prompt: Optional[bool] = None, retries: OptionalNullable[utils.RetryConfig] = UNSET, server_url: Optional[str] = None, @@ -636,28 +611,26 @@ def stream( if utils.match_response(http_res, "422", "application/json"): http_res_text = utils.stream_to_text(http_res) response_data = unmarshal_json_response( - models.HTTPValidationErrorData, http_res, http_res_text + errors.HTTPValidationErrorData, http_res, http_res_text ) - raise 
models.HTTPValidationError(response_data, http_res, http_res_text) + raise errors.HTTPValidationError(response_data, http_res, http_res_text) if utils.match_response(http_res, "4XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("Unexpected response received", http_res, http_res_text) + raise errors.SDKError("Unexpected response received", http_res, http_res_text) async def stream_async( self, *, model: str, messages: Union[ - List[models_chatcompletionstreamrequest.ChatCompletionStreamRequestMessage], - List[ - models_chatcompletionstreamrequest.ChatCompletionStreamRequestMessageTypedDict - ], + List[models.ChatCompletionStreamRequestMessage], + List[models.ChatCompletionStreamRequestMessageTypedDict], ], temperature: OptionalNullable[float] = UNSET, top_p: Optional[float] = None, @@ -665,37 +638,32 @@ async def stream_async( stream: Optional[bool] = True, stop: Optional[ Union[ - models_chatcompletionstreamrequest.ChatCompletionStreamRequestStop, - models_chatcompletionstreamrequest.ChatCompletionStreamRequestStopTypedDict, + models.ChatCompletionStreamRequestStop, + models.ChatCompletionStreamRequestStopTypedDict, ] ] = None, random_seed: OptionalNullable[int] = UNSET, metadata: OptionalNullable[Dict[str, Any]] = UNSET, response_format: Optional[ - Union[ - models_responseformat.ResponseFormat, - models_responseformat.ResponseFormatTypedDict, - ] + Union[models.ResponseFormat, models.ResponseFormatTypedDict] ] = None, tools: OptionalNullable[ - Union[List[models_tool.Tool], List[models_tool.ToolTypedDict]] + 
Union[List[models.Tool], List[models.ToolTypedDict]] ] = UNSET, tool_choice: Optional[ Union[ - models_chatcompletionstreamrequest.ChatCompletionStreamRequestToolChoice, - models_chatcompletionstreamrequest.ChatCompletionStreamRequestToolChoiceTypedDict, + models.ChatCompletionStreamRequestToolChoice, + models.ChatCompletionStreamRequestToolChoiceTypedDict, ] ] = None, presence_penalty: Optional[float] = None, frequency_penalty: Optional[float] = None, n: OptionalNullable[int] = UNSET, prediction: Optional[ - Union[models_prediction.Prediction, models_prediction.PredictionTypedDict] + Union[models.Prediction, models.PredictionTypedDict] ] = None, parallel_tool_calls: Optional[bool] = None, - prompt_mode: OptionalNullable[ - models_mistralpromptmode.MistralPromptMode - ] = UNSET, + prompt_mode: OptionalNullable[models.MistralPromptMode] = UNSET, safe_prompt: Optional[bool] = None, retries: OptionalNullable[utils.RetryConfig] = UNSET, server_url: Optional[str] = None, @@ -825,15 +793,15 @@ async def stream_async( if utils.match_response(http_res, "422", "application/json"): http_res_text = await utils.stream_to_text_async(http_res) response_data = unmarshal_json_response( - models.HTTPValidationErrorData, http_res, http_res_text + errors.HTTPValidationErrorData, http_res, http_res_text ) - raise models.HTTPValidationError(response_data, http_res, http_res_text) + raise errors.HTTPValidationError(response_data, http_res, http_res_text) if utils.match_response(http_res, "4XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) http_res_text = await 
utils.stream_to_text_async(http_res) - raise models.SDKError("Unexpected response received", http_res, http_res_text) + raise errors.SDKError("Unexpected response received", http_res, http_res_text) diff --git a/src/mistralai/client/classifiers.py b/src/mistralai/client/classifiers.py index 3407c4b7..67199b60 100644 --- a/src/mistralai/client/classifiers.py +++ b/src/mistralai/client/classifiers.py @@ -2,13 +2,8 @@ # @generated-id: 26e773725732 from .basesdk import BaseSDK -from mistralai.client import models, utils +from mistralai.client import errors, models, utils from mistralai.client._hooks import HookContext -from mistralai.client.models import ( - chatmoderationrequest as models_chatmoderationrequest, - classificationrequest as models_classificationrequest, - inputs as models_inputs, -) from mistralai.client.types import OptionalNullable, UNSET from mistralai.client.utils import get_security_from_env from mistralai.client.utils.unmarshal_json_response import unmarshal_json_response @@ -23,8 +18,8 @@ def moderate( *, model: str, inputs: Union[ - models_classificationrequest.ClassificationRequestInputs, - models_classificationrequest.ClassificationRequestInputsTypedDict, + models.ClassificationRequestInputs, + models.ClassificationRequestInputsTypedDict, ], metadata: OptionalNullable[Dict[str, Any]] = UNSET, retries: OptionalNullable[utils.RetryConfig] = UNSET, @@ -106,25 +101,25 @@ def moderate( return unmarshal_json_response(models.ModerationResponse, http_res) if utils.match_response(http_res, "422", "application/json"): response_data = unmarshal_json_response( - models.HTTPValidationErrorData, http_res + errors.HTTPValidationErrorData, http_res ) - raise models.HTTPValidationError(response_data, http_res) + raise errors.HTTPValidationError(response_data, http_res) if utils.match_response(http_res, "4XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API 
error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise models.SDKError("Unexpected response received", http_res) + raise errors.SDKError("Unexpected response received", http_res) async def moderate_async( self, *, model: str, inputs: Union[ - models_classificationrequest.ClassificationRequestInputs, - models_classificationrequest.ClassificationRequestInputsTypedDict, + models.ClassificationRequestInputs, + models.ClassificationRequestInputsTypedDict, ], metadata: OptionalNullable[Dict[str, Any]] = UNSET, retries: OptionalNullable[utils.RetryConfig] = UNSET, @@ -206,24 +201,24 @@ async def moderate_async( return unmarshal_json_response(models.ModerationResponse, http_res) if utils.match_response(http_res, "422", "application/json"): response_data = unmarshal_json_response( - models.HTTPValidationErrorData, http_res + errors.HTTPValidationErrorData, http_res ) - raise models.HTTPValidationError(response_data, http_res) + raise errors.HTTPValidationError(response_data, http_res) if utils.match_response(http_res, "4XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise models.SDKError("Unexpected response received", http_res) + raise errors.SDKError("Unexpected response received", http_res) def moderate_chat( self, *, inputs: Union[ - models_chatmoderationrequest.ChatModerationRequestInputs3, - 
models_chatmoderationrequest.ChatModerationRequestInputs3TypedDict, + models.ChatModerationRequestInputs3, + models.ChatModerationRequestInputs3TypedDict, ], model: str, retries: OptionalNullable[utils.RetryConfig] = UNSET, @@ -305,24 +300,24 @@ def moderate_chat( return unmarshal_json_response(models.ModerationResponse, http_res) if utils.match_response(http_res, "422", "application/json"): response_data = unmarshal_json_response( - models.HTTPValidationErrorData, http_res + errors.HTTPValidationErrorData, http_res ) - raise models.HTTPValidationError(response_data, http_res) + raise errors.HTTPValidationError(response_data, http_res) if utils.match_response(http_res, "4XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise models.SDKError("Unexpected response received", http_res) + raise errors.SDKError("Unexpected response received", http_res) async def moderate_chat_async( self, *, inputs: Union[ - models_chatmoderationrequest.ChatModerationRequestInputs3, - models_chatmoderationrequest.ChatModerationRequestInputs3TypedDict, + models.ChatModerationRequestInputs3, + models.ChatModerationRequestInputs3TypedDict, ], model: str, retries: OptionalNullable[utils.RetryConfig] = UNSET, @@ -404,25 +399,25 @@ async def moderate_chat_async( return unmarshal_json_response(models.ModerationResponse, http_res) if utils.match_response(http_res, "422", "application/json"): response_data = unmarshal_json_response( - models.HTTPValidationErrorData, http_res + errors.HTTPValidationErrorData, http_res ) - raise models.HTTPValidationError(response_data, http_res) + raise 
errors.HTTPValidationError(response_data, http_res) if utils.match_response(http_res, "4XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise models.SDKError("Unexpected response received", http_res) + raise errors.SDKError("Unexpected response received", http_res) def classify( self, *, model: str, inputs: Union[ - models_classificationrequest.ClassificationRequestInputs, - models_classificationrequest.ClassificationRequestInputsTypedDict, + models.ClassificationRequestInputs, + models.ClassificationRequestInputsTypedDict, ], metadata: OptionalNullable[Dict[str, Any]] = UNSET, retries: OptionalNullable[utils.RetryConfig] = UNSET, @@ -504,25 +499,25 @@ def classify( return unmarshal_json_response(models.ClassificationResponse, http_res) if utils.match_response(http_res, "422", "application/json"): response_data = unmarshal_json_response( - models.HTTPValidationErrorData, http_res + errors.HTTPValidationErrorData, http_res ) - raise models.HTTPValidationError(response_data, http_res) + raise errors.HTTPValidationError(response_data, http_res) if utils.match_response(http_res, "4XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise models.SDKError("Unexpected response received", 
http_res) + raise errors.SDKError("Unexpected response received", http_res) async def classify_async( self, *, model: str, inputs: Union[ - models_classificationrequest.ClassificationRequestInputs, - models_classificationrequest.ClassificationRequestInputsTypedDict, + models.ClassificationRequestInputs, + models.ClassificationRequestInputsTypedDict, ], metadata: OptionalNullable[Dict[str, Any]] = UNSET, retries: OptionalNullable[utils.RetryConfig] = UNSET, @@ -604,23 +599,23 @@ async def classify_async( return unmarshal_json_response(models.ClassificationResponse, http_res) if utils.match_response(http_res, "422", "application/json"): response_data = unmarshal_json_response( - models.HTTPValidationErrorData, http_res + errors.HTTPValidationErrorData, http_res ) - raise models.HTTPValidationError(response_data, http_res) + raise errors.HTTPValidationError(response_data, http_res) if utils.match_response(http_res, "4XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise models.SDKError("Unexpected response received", http_res) + raise errors.SDKError("Unexpected response received", http_res) def classify_chat( self, *, model: str, - inputs: Union[models_inputs.Inputs, models_inputs.InputsTypedDict], + input: Union[models.Inputs, models.InputsTypedDict], retries: OptionalNullable[utils.RetryConfig] = UNSET, server_url: Optional[str] = None, timeout_ms: Optional[int] = None, @@ -629,7 +624,7 @@ def classify_chat( r"""Chat Classifications :param model: - :param inputs: Chat to classify + :param input: Chat to classify :param retries: Override the default retry 
configuration for this method :param server_url: Override the default server URL for this method :param timeout_ms: Override the default request timeout configuration for this method in milliseconds @@ -647,7 +642,7 @@ def classify_chat( request = models.ChatClassificationRequest( model=model, - inputs=utils.get_pydantic_model(inputs, models.Inputs), + input=utils.get_pydantic_model(input, models.Inputs), ) req = self._build_request( @@ -698,23 +693,23 @@ def classify_chat( return unmarshal_json_response(models.ClassificationResponse, http_res) if utils.match_response(http_res, "422", "application/json"): response_data = unmarshal_json_response( - models.HTTPValidationErrorData, http_res + errors.HTTPValidationErrorData, http_res ) - raise models.HTTPValidationError(response_data, http_res) + raise errors.HTTPValidationError(response_data, http_res) if utils.match_response(http_res, "4XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise models.SDKError("Unexpected response received", http_res) + raise errors.SDKError("Unexpected response received", http_res) async def classify_chat_async( self, *, model: str, - inputs: Union[models_inputs.Inputs, models_inputs.InputsTypedDict], + input: Union[models.Inputs, models.InputsTypedDict], retries: OptionalNullable[utils.RetryConfig] = UNSET, server_url: Optional[str] = None, timeout_ms: Optional[int] = None, @@ -723,7 +718,7 @@ async def classify_chat_async( r"""Chat Classifications :param model: - :param inputs: Chat to classify + :param input: Chat to classify :param retries: Override the default retry configuration for this method 
:param server_url: Override the default server URL for this method :param timeout_ms: Override the default request timeout configuration for this method in milliseconds @@ -741,7 +736,7 @@ async def classify_chat_async( request = models.ChatClassificationRequest( model=model, - inputs=utils.get_pydantic_model(inputs, models.Inputs), + input=utils.get_pydantic_model(input, models.Inputs), ) req = self._build_request_async( @@ -792,14 +787,14 @@ async def classify_chat_async( return unmarshal_json_response(models.ClassificationResponse, http_res) if utils.match_response(http_res, "422", "application/json"): response_data = unmarshal_json_response( - models.HTTPValidationErrorData, http_res + errors.HTTPValidationErrorData, http_res ) - raise models.HTTPValidationError(response_data, http_res) + raise errors.HTTPValidationError(response_data, http_res) if utils.match_response(http_res, "4XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise models.SDKError("Unexpected response received", http_res) + raise errors.SDKError("Unexpected response received", http_res) diff --git a/src/mistralai/client/conversations.py b/src/mistralai/client/conversations.py index 646b91f3..ec33b1fb 100644 --- a/src/mistralai/client/conversations.py +++ b/src/mistralai/client/conversations.py @@ -2,18 +2,8 @@ # @generated-id: 40692a878064 from .basesdk import BaseSDK -from mistralai.client import models, utils +from mistralai.client import errors, models, utils from mistralai.client._hooks import HookContext -from mistralai.client.models import ( - completionargs as 
models_completionargs, - conversationappendrequest as models_conversationappendrequest, - conversationappendstreamrequest as models_conversationappendstreamrequest, - conversationinputs as models_conversationinputs, - conversationrequest as models_conversationrequest, - conversationrestartrequest as models_conversationrestartrequest, - conversationrestartstreamrequest as models_conversationrestartstreamrequest, - conversationstreamrequest as models_conversationstreamrequest, -) from mistralai.client.types import OptionalNullable, UNSET from mistralai.client.utils import eventstreaming, get_security_from_env from mistralai.client.utils.unmarshal_json_response import unmarshal_json_response @@ -238,27 +228,21 @@ async def run_generator() -> ( def start( self, *, - inputs: Union[ - models_conversationinputs.ConversationInputs, - models_conversationinputs.ConversationInputsTypedDict, - ], + inputs: Union[models.ConversationInputs, models.ConversationInputsTypedDict], stream: Optional[bool] = False, store: OptionalNullable[bool] = UNSET, handoff_execution: OptionalNullable[ - models_conversationrequest.ConversationRequestHandoffExecution + models.ConversationRequestHandoffExecution ] = UNSET, instructions: OptionalNullable[str] = UNSET, tools: Optional[ Union[ - List[models_conversationrequest.ConversationRequestTool], - List[models_conversationrequest.ConversationRequestToolTypedDict], + List[models.ConversationRequestTool], + List[models.ConversationRequestToolTypedDict], ] ] = None, completion_args: OptionalNullable[ - Union[ - models_completionargs.CompletionArgs, - models_completionargs.CompletionArgsTypedDict, - ] + Union[models.CompletionArgs, models.CompletionArgsTypedDict] ] = UNSET, name: OptionalNullable[str] = UNSET, description: OptionalNullable[str] = UNSET, @@ -266,8 +250,8 @@ def start( agent_id: OptionalNullable[str] = UNSET, agent_version: OptionalNullable[ Union[ - models_conversationrequest.ConversationRequestAgentVersion, - 
models_conversationrequest.ConversationRequestAgentVersionTypedDict, + models.ConversationRequestAgentVersion, + models.ConversationRequestAgentVersionTypedDict, ] ] = UNSET, model: OptionalNullable[str] = UNSET, @@ -360,7 +344,7 @@ def start( hook_ctx=HookContext( config=self.sdk_configuration, base_url=base_url or "", - operation_id="StartConversation", + operation_id="agents_api_v1_conversations_start", oauth2_scopes=None, security_source=get_security_from_env( self.sdk_configuration.security, models.Security @@ -376,42 +360,36 @@ def start( return unmarshal_json_response(models.ConversationResponse, http_res) if utils.match_response(http_res, "422", "application/json"): response_data = unmarshal_json_response( - models.HTTPValidationErrorData, http_res + errors.HTTPValidationErrorData, http_res ) - raise models.HTTPValidationError(response_data, http_res) + raise errors.HTTPValidationError(response_data, http_res) if utils.match_response(http_res, "4XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise models.SDKError("Unexpected response received", http_res) + raise errors.SDKError("Unexpected response received", http_res) async def start_async( self, *, - inputs: Union[ - models_conversationinputs.ConversationInputs, - models_conversationinputs.ConversationInputsTypedDict, - ], + inputs: Union[models.ConversationInputs, models.ConversationInputsTypedDict], stream: Optional[bool] = False, store: OptionalNullable[bool] = UNSET, handoff_execution: OptionalNullable[ - models_conversationrequest.ConversationRequestHandoffExecution + models.ConversationRequestHandoffExecution ] = 
UNSET, instructions: OptionalNullable[str] = UNSET, tools: Optional[ Union[ - List[models_conversationrequest.ConversationRequestTool], - List[models_conversationrequest.ConversationRequestToolTypedDict], + List[models.ConversationRequestTool], + List[models.ConversationRequestToolTypedDict], ] ] = None, completion_args: OptionalNullable[ - Union[ - models_completionargs.CompletionArgs, - models_completionargs.CompletionArgsTypedDict, - ] + Union[models.CompletionArgs, models.CompletionArgsTypedDict] ] = UNSET, name: OptionalNullable[str] = UNSET, description: OptionalNullable[str] = UNSET, @@ -419,8 +397,8 @@ async def start_async( agent_id: OptionalNullable[str] = UNSET, agent_version: OptionalNullable[ Union[ - models_conversationrequest.ConversationRequestAgentVersion, - models_conversationrequest.ConversationRequestAgentVersionTypedDict, + models.ConversationRequestAgentVersion, + models.ConversationRequestAgentVersionTypedDict, ] ] = UNSET, model: OptionalNullable[str] = UNSET, @@ -513,7 +491,7 @@ async def start_async( hook_ctx=HookContext( config=self.sdk_configuration, base_url=base_url or "", - operation_id="StartConversation", + operation_id="agents_api_v1_conversations_start", oauth2_scopes=None, security_source=get_security_from_env( self.sdk_configuration.security, models.Security @@ -529,17 +507,17 @@ async def start_async( return unmarshal_json_response(models.ConversationResponse, http_res) if utils.match_response(http_res, "422", "application/json"): response_data = unmarshal_json_response( - models.HTTPValidationErrorData, http_res + errors.HTTPValidationErrorData, http_res ) - raise models.HTTPValidationError(response_data, http_res) + raise errors.HTTPValidationError(response_data, http_res) if utils.match_response(http_res, "4XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if 
utils.match_response(http_res, "5XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise models.SDKError("Unexpected response received", http_res) + raise errors.SDKError("Unexpected response received", http_res) def list( self, @@ -551,7 +529,7 @@ def list( server_url: Optional[str] = None, timeout_ms: Optional[int] = None, http_headers: Optional[Mapping[str, str]] = None, - ) -> List[models.ListConversationsResponse]: + ) -> List[models.AgentsAPIV1ConversationsListResponse]: r"""List all created conversations. Retrieve a list of conversation entities sorted by creation time. @@ -574,7 +552,7 @@ def list( else: base_url = self._get_url(base_url, url_variables) - request = models.ListConversationsRequest( + request = models.AgentsAPIV1ConversationsListRequest( page=page, page_size=page_size, metadata=metadata, @@ -609,7 +587,7 @@ def list( hook_ctx=HookContext( config=self.sdk_configuration, base_url=base_url or "", - operation_id="ListConversations", + operation_id="agents_api_v1_conversations_list", oauth2_scopes=None, security_source=get_security_from_env( self.sdk_configuration.security, models.Security @@ -623,21 +601,21 @@ def list( response_data: Any = None if utils.match_response(http_res, "200", "application/json"): return unmarshal_json_response( - List[models.ListConversationsResponse], http_res + List[models.AgentsAPIV1ConversationsListResponse], http_res ) if utils.match_response(http_res, "422", "application/json"): response_data = unmarshal_json_response( - models.HTTPValidationErrorData, http_res + errors.HTTPValidationErrorData, http_res ) - raise models.HTTPValidationError(response_data, http_res) + raise errors.HTTPValidationError(response_data, http_res) if utils.match_response(http_res, "4XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error 
occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise models.SDKError("Unexpected response received", http_res) + raise errors.SDKError("Unexpected response received", http_res) async def list_async( self, @@ -649,7 +627,7 @@ async def list_async( server_url: Optional[str] = None, timeout_ms: Optional[int] = None, http_headers: Optional[Mapping[str, str]] = None, - ) -> List[models.ListConversationsResponse]: + ) -> List[models.AgentsAPIV1ConversationsListResponse]: r"""List all created conversations. Retrieve a list of conversation entities sorted by creation time. @@ -672,7 +650,7 @@ async def list_async( else: base_url = self._get_url(base_url, url_variables) - request = models.ListConversationsRequest( + request = models.AgentsAPIV1ConversationsListRequest( page=page, page_size=page_size, metadata=metadata, @@ -707,7 +685,7 @@ async def list_async( hook_ctx=HookContext( config=self.sdk_configuration, base_url=base_url or "", - operation_id="ListConversations", + operation_id="agents_api_v1_conversations_list", oauth2_scopes=None, security_source=get_security_from_env( self.sdk_configuration.security, models.Security @@ -721,21 +699,21 @@ async def list_async( response_data: Any = None if utils.match_response(http_res, "200", "application/json"): return unmarshal_json_response( - List[models.ListConversationsResponse], http_res + List[models.AgentsAPIV1ConversationsListResponse], http_res ) if utils.match_response(http_res, "422", "application/json"): response_data = unmarshal_json_response( - models.HTTPValidationErrorData, http_res + errors.HTTPValidationErrorData, http_res ) - raise models.HTTPValidationError(response_data, http_res) + raise 
errors.HTTPValidationError(response_data, http_res) if utils.match_response(http_res, "4XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise models.SDKError("Unexpected response received", http_res) + raise errors.SDKError("Unexpected response received", http_res) def get( self, @@ -766,7 +744,7 @@ def get( else: base_url = self._get_url(base_url, url_variables) - request = models.GetConversationRequest( + request = models.AgentsAPIV1ConversationsGetRequest( conversation_id=conversation_id, ) @@ -799,7 +777,7 @@ def get( hook_ctx=HookContext( config=self.sdk_configuration, base_url=base_url or "", - operation_id="GetConversation", + operation_id="agents_api_v1_conversations_get", oauth2_scopes=None, security_source=get_security_from_env( self.sdk_configuration.security, models.Security @@ -815,17 +793,17 @@ def get( return unmarshal_json_response(models.ResponseV1ConversationsGet, http_res) if utils.match_response(http_res, "422", "application/json"): response_data = unmarshal_json_response( - models.HTTPValidationErrorData, http_res + errors.HTTPValidationErrorData, http_res ) - raise models.HTTPValidationError(response_data, http_res) + raise errors.HTTPValidationError(response_data, http_res) if utils.match_response(http_res, "4XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error 
occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise models.SDKError("Unexpected response received", http_res) + raise errors.SDKError("Unexpected response received", http_res) async def get_async( self, @@ -856,7 +834,7 @@ async def get_async( else: base_url = self._get_url(base_url, url_variables) - request = models.GetConversationRequest( + request = models.AgentsAPIV1ConversationsGetRequest( conversation_id=conversation_id, ) @@ -889,7 +867,7 @@ async def get_async( hook_ctx=HookContext( config=self.sdk_configuration, base_url=base_url or "", - operation_id="GetConversation", + operation_id="agents_api_v1_conversations_get", oauth2_scopes=None, security_source=get_security_from_env( self.sdk_configuration.security, models.Security @@ -905,17 +883,17 @@ async def get_async( return unmarshal_json_response(models.ResponseV1ConversationsGet, http_res) if utils.match_response(http_res, "422", "application/json"): response_data = unmarshal_json_response( - models.HTTPValidationErrorData, http_res + errors.HTTPValidationErrorData, http_res ) - raise models.HTTPValidationError(response_data, http_res) + raise errors.HTTPValidationError(response_data, http_res) if utils.match_response(http_res, "4XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise models.SDKError("Unexpected response received", http_res) + raise errors.SDKError("Unexpected response received", http_res) def delete( self, @@ -946,7 +924,7 @@ def delete( else: base_url = self._get_url(base_url, url_variables) - request = 
models.DeleteConversationRequest( + request = models.AgentsAPIV1ConversationsDeleteRequest( conversation_id=conversation_id, ) @@ -979,7 +957,7 @@ def delete( hook_ctx=HookContext( config=self.sdk_configuration, base_url=base_url or "", - operation_id="DeleteConversation", + operation_id="agents_api_v1_conversations_delete", oauth2_scopes=None, security_source=get_security_from_env( self.sdk_configuration.security, models.Security @@ -995,17 +973,17 @@ def delete( return if utils.match_response(http_res, "422", "application/json"): response_data = unmarshal_json_response( - models.HTTPValidationErrorData, http_res + errors.HTTPValidationErrorData, http_res ) - raise models.HTTPValidationError(response_data, http_res) + raise errors.HTTPValidationError(response_data, http_res) if utils.match_response(http_res, "4XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise models.SDKError("Unexpected response received", http_res) + raise errors.SDKError("Unexpected response received", http_res) async def delete_async( self, @@ -1036,7 +1014,7 @@ async def delete_async( else: base_url = self._get_url(base_url, url_variables) - request = models.DeleteConversationRequest( + request = models.AgentsAPIV1ConversationsDeleteRequest( conversation_id=conversation_id, ) @@ -1069,7 +1047,7 @@ async def delete_async( hook_ctx=HookContext( config=self.sdk_configuration, base_url=base_url or "", - operation_id="DeleteConversation", + operation_id="agents_api_v1_conversations_delete", oauth2_scopes=None, security_source=get_security_from_env( self.sdk_configuration.security, models.Security @@ -1085,37 
+1063,39 @@ async def delete_async( return if utils.match_response(http_res, "422", "application/json"): response_data = unmarshal_json_response( - models.HTTPValidationErrorData, http_res + errors.HTTPValidationErrorData, http_res ) - raise models.HTTPValidationError(response_data, http_res) + raise errors.HTTPValidationError(response_data, http_res) if utils.match_response(http_res, "4XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise models.SDKError("Unexpected response received", http_res) + raise errors.SDKError("Unexpected response received", http_res) def append( self, *, conversation_id: str, - inputs: Union[ - models_conversationinputs.ConversationInputs, - models_conversationinputs.ConversationInputsTypedDict, - ], + inputs: Optional[ + Union[models.ConversationInputs, models.ConversationInputsTypedDict] + ] = None, stream: Optional[bool] = False, store: Optional[bool] = True, handoff_execution: Optional[ - models_conversationappendrequest.ConversationAppendRequestHandoffExecution + models.ConversationAppendRequestHandoffExecution ] = "server", completion_args: Optional[ + Union[models.CompletionArgs, models.CompletionArgsTypedDict] + ] = None, + tool_confirmations: OptionalNullable[ Union[ - models_completionargs.CompletionArgs, - models_completionargs.CompletionArgsTypedDict, + List[models.ToolCallConfirmation], + List[models.ToolCallConfirmationTypedDict], ] - ] = None, + ] = UNSET, retries: OptionalNullable[utils.RetryConfig] = UNSET, server_url: Optional[str] = None, timeout_ms: Optional[int] = None, @@ -1131,6 +1111,7 @@ def append( :param 
store: Whether to store the results into our servers or not. :param handoff_execution: :param completion_args: White-listed arguments from the completion API + :param tool_confirmations: :param retries: Override the default retry configuration for this method :param server_url: Override the default server URL for this method :param timeout_ms: Override the default request timeout configuration for this method in milliseconds @@ -1146,16 +1127,22 @@ def append( else: base_url = self._get_url(base_url, url_variables) - request = models.AppendConversationRequest( + request = models.AgentsAPIV1ConversationsAppendRequest( conversation_id=conversation_id, conversation_append_request=models.ConversationAppendRequest( - inputs=utils.get_pydantic_model(inputs, models.ConversationInputs), + inputs=utils.get_pydantic_model( + inputs, Optional[models.ConversationInputs] + ), stream=stream, store=store, handoff_execution=handoff_execution, completion_args=utils.get_pydantic_model( completion_args, Optional[models.CompletionArgs] ), + tool_confirmations=utils.get_pydantic_model( + tool_confirmations, + OptionalNullable[List[models.ToolCallConfirmation]], + ), ), ) @@ -1195,7 +1182,7 @@ def append( hook_ctx=HookContext( config=self.sdk_configuration, base_url=base_url or "", - operation_id="AppendConversation", + operation_id="agents_api_v1_conversations_append", oauth2_scopes=None, security_source=get_security_from_env( self.sdk_configuration.security, models.Security @@ -1211,37 +1198,39 @@ def append( return unmarshal_json_response(models.ConversationResponse, http_res) if utils.match_response(http_res, "422", "application/json"): response_data = unmarshal_json_response( - models.HTTPValidationErrorData, http_res + errors.HTTPValidationErrorData, http_res ) - raise models.HTTPValidationError(response_data, http_res) + raise errors.HTTPValidationError(response_data, http_res) if utils.match_response(http_res, "4XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise 
models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise models.SDKError("Unexpected response received", http_res) + raise errors.SDKError("Unexpected response received", http_res) async def append_async( self, *, conversation_id: str, - inputs: Union[ - models_conversationinputs.ConversationInputs, - models_conversationinputs.ConversationInputsTypedDict, - ], + inputs: Optional[ + Union[models.ConversationInputs, models.ConversationInputsTypedDict] + ] = None, stream: Optional[bool] = False, store: Optional[bool] = True, handoff_execution: Optional[ - models_conversationappendrequest.ConversationAppendRequestHandoffExecution + models.ConversationAppendRequestHandoffExecution ] = "server", completion_args: Optional[ + Union[models.CompletionArgs, models.CompletionArgsTypedDict] + ] = None, + tool_confirmations: OptionalNullable[ Union[ - models_completionargs.CompletionArgs, - models_completionargs.CompletionArgsTypedDict, + List[models.ToolCallConfirmation], + List[models.ToolCallConfirmationTypedDict], ] - ] = None, + ] = UNSET, retries: OptionalNullable[utils.RetryConfig] = UNSET, server_url: Optional[str] = None, timeout_ms: Optional[int] = None, @@ -1257,6 +1246,7 @@ async def append_async( :param store: Whether to store the results into our servers or not. 
:param handoff_execution: :param completion_args: White-listed arguments from the completion API + :param tool_confirmations: :param retries: Override the default retry configuration for this method :param server_url: Override the default server URL for this method :param timeout_ms: Override the default request timeout configuration for this method in milliseconds @@ -1272,16 +1262,22 @@ async def append_async( else: base_url = self._get_url(base_url, url_variables) - request = models.AppendConversationRequest( + request = models.AgentsAPIV1ConversationsAppendRequest( conversation_id=conversation_id, conversation_append_request=models.ConversationAppendRequest( - inputs=utils.get_pydantic_model(inputs, models.ConversationInputs), + inputs=utils.get_pydantic_model( + inputs, Optional[models.ConversationInputs] + ), stream=stream, store=store, handoff_execution=handoff_execution, completion_args=utils.get_pydantic_model( completion_args, Optional[models.CompletionArgs] ), + tool_confirmations=utils.get_pydantic_model( + tool_confirmations, + OptionalNullable[List[models.ToolCallConfirmation]], + ), ), ) @@ -1321,7 +1317,7 @@ async def append_async( hook_ctx=HookContext( config=self.sdk_configuration, base_url=base_url or "", - operation_id="AppendConversation", + operation_id="agents_api_v1_conversations_append", oauth2_scopes=None, security_source=get_security_from_env( self.sdk_configuration.security, models.Security @@ -1337,17 +1333,17 @@ async def append_async( return unmarshal_json_response(models.ConversationResponse, http_res) if utils.match_response(http_res, "422", "application/json"): response_data = unmarshal_json_response( - models.HTTPValidationErrorData, http_res + errors.HTTPValidationErrorData, http_res ) - raise models.HTTPValidationError(response_data, http_res) + raise errors.HTTPValidationError(response_data, http_res) if utils.match_response(http_res, "4XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise 
models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise models.SDKError("Unexpected response received", http_res) + raise errors.SDKError("Unexpected response received", http_res) def get_history( self, @@ -1378,7 +1374,7 @@ def get_history( else: base_url = self._get_url(base_url, url_variables) - request = models.GetConversationHistoryRequest( + request = models.AgentsAPIV1ConversationsHistoryRequest( conversation_id=conversation_id, ) @@ -1411,7 +1407,7 @@ def get_history( hook_ctx=HookContext( config=self.sdk_configuration, base_url=base_url or "", - operation_id="GetConversationHistory", + operation_id="agents_api_v1_conversations_history", oauth2_scopes=None, security_source=get_security_from_env( self.sdk_configuration.security, models.Security @@ -1427,17 +1423,17 @@ def get_history( return unmarshal_json_response(models.ConversationHistory, http_res) if utils.match_response(http_res, "422", "application/json"): response_data = unmarshal_json_response( - models.HTTPValidationErrorData, http_res + errors.HTTPValidationErrorData, http_res ) - raise models.HTTPValidationError(response_data, http_res) + raise errors.HTTPValidationError(response_data, http_res) if utils.match_response(http_res, "4XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise 
models.SDKError("Unexpected response received", http_res) + raise errors.SDKError("Unexpected response received", http_res) async def get_history_async( self, @@ -1468,7 +1464,7 @@ async def get_history_async( else: base_url = self._get_url(base_url, url_variables) - request = models.GetConversationHistoryRequest( + request = models.AgentsAPIV1ConversationsHistoryRequest( conversation_id=conversation_id, ) @@ -1501,7 +1497,7 @@ async def get_history_async( hook_ctx=HookContext( config=self.sdk_configuration, base_url=base_url or "", - operation_id="GetConversationHistory", + operation_id="agents_api_v1_conversations_history", oauth2_scopes=None, security_source=get_security_from_env( self.sdk_configuration.security, models.Security @@ -1517,17 +1513,17 @@ async def get_history_async( return unmarshal_json_response(models.ConversationHistory, http_res) if utils.match_response(http_res, "422", "application/json"): response_data = unmarshal_json_response( - models.HTTPValidationErrorData, http_res + errors.HTTPValidationErrorData, http_res ) - raise models.HTTPValidationError(response_data, http_res) + raise errors.HTTPValidationError(response_data, http_res) if utils.match_response(http_res, "4XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise models.SDKError("Unexpected response received", http_res) + raise errors.SDKError("Unexpected response received", http_res) def get_messages( self, @@ -1558,7 +1554,7 @@ def get_messages( else: base_url = self._get_url(base_url, url_variables) - request = models.GetConversationMessagesRequest( + request = 
models.AgentsAPIV1ConversationsMessagesRequest( conversation_id=conversation_id, ) @@ -1591,7 +1587,7 @@ def get_messages( hook_ctx=HookContext( config=self.sdk_configuration, base_url=base_url or "", - operation_id="GetConversationMessages", + operation_id="agents_api_v1_conversations_messages", oauth2_scopes=None, security_source=get_security_from_env( self.sdk_configuration.security, models.Security @@ -1607,17 +1603,17 @@ def get_messages( return unmarshal_json_response(models.ConversationMessages, http_res) if utils.match_response(http_res, "422", "application/json"): response_data = unmarshal_json_response( - models.HTTPValidationErrorData, http_res + errors.HTTPValidationErrorData, http_res ) - raise models.HTTPValidationError(response_data, http_res) + raise errors.HTTPValidationError(response_data, http_res) if utils.match_response(http_res, "4XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise models.SDKError("Unexpected response received", http_res) + raise errors.SDKError("Unexpected response received", http_res) async def get_messages_async( self, @@ -1648,7 +1644,7 @@ async def get_messages_async( else: base_url = self._get_url(base_url, url_variables) - request = models.GetConversationMessagesRequest( + request = models.AgentsAPIV1ConversationsMessagesRequest( conversation_id=conversation_id, ) @@ -1681,7 +1677,7 @@ async def get_messages_async( hook_ctx=HookContext( config=self.sdk_configuration, base_url=base_url or "", - operation_id="GetConversationMessages", + operation_id="agents_api_v1_conversations_messages", oauth2_scopes=None, 
security_source=get_security_from_env( self.sdk_configuration.security, models.Security @@ -1697,43 +1693,39 @@ async def get_messages_async( return unmarshal_json_response(models.ConversationMessages, http_res) if utils.match_response(http_res, "422", "application/json"): response_data = unmarshal_json_response( - models.HTTPValidationErrorData, http_res + errors.HTTPValidationErrorData, http_res ) - raise models.HTTPValidationError(response_data, http_res) + raise errors.HTTPValidationError(response_data, http_res) if utils.match_response(http_res, "4XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise models.SDKError("Unexpected response received", http_res) + raise errors.SDKError("Unexpected response received", http_res) def restart( self, *, conversation_id: str, - inputs: Union[ - models_conversationinputs.ConversationInputs, - models_conversationinputs.ConversationInputsTypedDict, - ], from_entry_id: str, + inputs: Optional[ + Union[models.ConversationInputs, models.ConversationInputsTypedDict] + ] = None, stream: Optional[bool] = False, store: Optional[bool] = True, handoff_execution: Optional[ - models_conversationrestartrequest.ConversationRestartRequestHandoffExecution + models.ConversationRestartRequestHandoffExecution ] = "server", completion_args: Optional[ - Union[ - models_completionargs.CompletionArgs, - models_completionargs.CompletionArgsTypedDict, - ] + Union[models.CompletionArgs, models.CompletionArgsTypedDict] ] = None, metadata: OptionalNullable[Dict[str, Any]] = UNSET, agent_version: OptionalNullable[ Union[ - 
models_conversationrestartrequest.ConversationRestartRequestAgentVersion, - models_conversationrestartrequest.ConversationRestartRequestAgentVersionTypedDict, + models.ConversationRestartRequestAgentVersion, + models.ConversationRestartRequestAgentVersionTypedDict, ] ] = UNSET, retries: OptionalNullable[utils.RetryConfig] = UNSET, @@ -1746,8 +1738,8 @@ def restart( Given a conversation_id and an id, recreate a conversation from this point and run completion. A new conversation is returned with the new entries returned. :param conversation_id: ID of the original conversation which is being restarted. - :param inputs: :param from_entry_id: + :param inputs: :param stream: :param store: Whether to store the results into our servers or not. :param handoff_execution: @@ -1769,10 +1761,12 @@ def restart( else: base_url = self._get_url(base_url, url_variables) - request = models.RestartConversationRequest( + request = models.AgentsAPIV1ConversationsRestartRequest( conversation_id=conversation_id, conversation_restart_request=models.ConversationRestartRequest( - inputs=utils.get_pydantic_model(inputs, models.ConversationInputs), + inputs=utils.get_pydantic_model( + inputs, Optional[models.ConversationInputs] + ), stream=stream, store=store, handoff_execution=handoff_execution, @@ -1821,7 +1815,7 @@ def restart( hook_ctx=HookContext( config=self.sdk_configuration, base_url=base_url or "", - operation_id="RestartConversation", + operation_id="agents_api_v1_conversations_restart", oauth2_scopes=None, security_source=get_security_from_env( self.sdk_configuration.security, models.Security @@ -1837,43 +1831,39 @@ def restart( return unmarshal_json_response(models.ConversationResponse, http_res) if utils.match_response(http_res, "422", "application/json"): response_data = unmarshal_json_response( - models.HTTPValidationErrorData, http_res + errors.HTTPValidationErrorData, http_res ) - raise models.HTTPValidationError(response_data, http_res) + raise 
errors.HTTPValidationError(response_data, http_res) if utils.match_response(http_res, "4XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise models.SDKError("Unexpected response received", http_res) + raise errors.SDKError("Unexpected response received", http_res) async def restart_async( self, *, conversation_id: str, - inputs: Union[ - models_conversationinputs.ConversationInputs, - models_conversationinputs.ConversationInputsTypedDict, - ], from_entry_id: str, + inputs: Optional[ + Union[models.ConversationInputs, models.ConversationInputsTypedDict] + ] = None, stream: Optional[bool] = False, store: Optional[bool] = True, handoff_execution: Optional[ - models_conversationrestartrequest.ConversationRestartRequestHandoffExecution + models.ConversationRestartRequestHandoffExecution ] = "server", completion_args: Optional[ - Union[ - models_completionargs.CompletionArgs, - models_completionargs.CompletionArgsTypedDict, - ] + Union[models.CompletionArgs, models.CompletionArgsTypedDict] ] = None, metadata: OptionalNullable[Dict[str, Any]] = UNSET, agent_version: OptionalNullable[ Union[ - models_conversationrestartrequest.ConversationRestartRequestAgentVersion, - models_conversationrestartrequest.ConversationRestartRequestAgentVersionTypedDict, + models.ConversationRestartRequestAgentVersion, + models.ConversationRestartRequestAgentVersionTypedDict, ] ] = UNSET, retries: OptionalNullable[utils.RetryConfig] = UNSET, @@ -1886,8 +1876,8 @@ async def restart_async( Given a conversation_id and an id, recreate a conversation from this point and run completion. 
A new conversation is returned with the new entries returned. :param conversation_id: ID of the original conversation which is being restarted. - :param inputs: :param from_entry_id: + :param inputs: :param stream: :param store: Whether to store the results into our servers or not. :param handoff_execution: @@ -1909,10 +1899,12 @@ async def restart_async( else: base_url = self._get_url(base_url, url_variables) - request = models.RestartConversationRequest( + request = models.AgentsAPIV1ConversationsRestartRequest( conversation_id=conversation_id, conversation_restart_request=models.ConversationRestartRequest( - inputs=utils.get_pydantic_model(inputs, models.ConversationInputs), + inputs=utils.get_pydantic_model( + inputs, Optional[models.ConversationInputs] + ), stream=stream, store=store, handoff_execution=handoff_execution, @@ -1961,7 +1953,7 @@ async def restart_async( hook_ctx=HookContext( config=self.sdk_configuration, base_url=base_url or "", - operation_id="RestartConversation", + operation_id="agents_api_v1_conversations_restart", oauth2_scopes=None, security_source=get_security_from_env( self.sdk_configuration.security, models.Security @@ -1977,44 +1969,36 @@ async def restart_async( return unmarshal_json_response(models.ConversationResponse, http_res) if utils.match_response(http_res, "422", "application/json"): response_data = unmarshal_json_response( - models.HTTPValidationErrorData, http_res + errors.HTTPValidationErrorData, http_res ) - raise models.HTTPValidationError(response_data, http_res) + raise errors.HTTPValidationError(response_data, http_res) if utils.match_response(http_res, "4XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", 
http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise models.SDKError("Unexpected response received", http_res) + raise errors.SDKError("Unexpected response received", http_res) def start_stream( self, *, - inputs: Union[ - models_conversationinputs.ConversationInputs, - models_conversationinputs.ConversationInputsTypedDict, - ], + inputs: Union[models.ConversationInputs, models.ConversationInputsTypedDict], stream: Optional[bool] = True, store: OptionalNullable[bool] = UNSET, handoff_execution: OptionalNullable[ - models_conversationstreamrequest.ConversationStreamRequestHandoffExecution + models.ConversationStreamRequestHandoffExecution ] = UNSET, instructions: OptionalNullable[str] = UNSET, tools: Optional[ Union[ - List[models_conversationstreamrequest.ConversationStreamRequestTool], - List[ - models_conversationstreamrequest.ConversationStreamRequestToolTypedDict - ], + List[models.ConversationStreamRequestTool], + List[models.ConversationStreamRequestToolTypedDict], ] ] = None, completion_args: OptionalNullable[ - Union[ - models_completionargs.CompletionArgs, - models_completionargs.CompletionArgsTypedDict, - ] + Union[models.CompletionArgs, models.CompletionArgsTypedDict] ] = UNSET, name: OptionalNullable[str] = UNSET, description: OptionalNullable[str] = UNSET, @@ -2022,8 +2006,8 @@ def start_stream( agent_id: OptionalNullable[str] = UNSET, agent_version: OptionalNullable[ Union[ - models_conversationstreamrequest.ConversationStreamRequestAgentVersion, - models_conversationstreamrequest.ConversationStreamRequestAgentVersionTypedDict, + models.ConversationStreamRequestAgentVersion, + models.ConversationStreamRequestAgentVersionTypedDict, ] ] = UNSET, model: OptionalNullable[str] = UNSET, @@ -2116,7 +2100,7 @@ def start_stream( hook_ctx=HookContext( config=self.sdk_configuration, base_url=base_url or "", - operation_id="StartConversationStream", + operation_id="agents_api_v1_conversations_start_stream", 
oauth2_scopes=None, security_source=get_security_from_env( self.sdk_configuration.security, models.Security @@ -2138,45 +2122,37 @@ def start_stream( if utils.match_response(http_res, "422", "application/json"): http_res_text = utils.stream_to_text(http_res) response_data = unmarshal_json_response( - models.HTTPValidationErrorData, http_res, http_res_text + errors.HTTPValidationErrorData, http_res, http_res_text ) - raise models.HTTPValidationError(response_data, http_res, http_res_text) + raise errors.HTTPValidationError(response_data, http_res, http_res_text) if utils.match_response(http_res, "4XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("Unexpected response received", http_res, http_res_text) + raise errors.SDKError("Unexpected response received", http_res, http_res_text) async def start_stream_async( self, *, - inputs: Union[ - models_conversationinputs.ConversationInputs, - models_conversationinputs.ConversationInputsTypedDict, - ], + inputs: Union[models.ConversationInputs, models.ConversationInputsTypedDict], stream: Optional[bool] = True, store: OptionalNullable[bool] = UNSET, handoff_execution: OptionalNullable[ - models_conversationstreamrequest.ConversationStreamRequestHandoffExecution + models.ConversationStreamRequestHandoffExecution ] = UNSET, instructions: OptionalNullable[str] = UNSET, tools: Optional[ Union[ - List[models_conversationstreamrequest.ConversationStreamRequestTool], - List[ - models_conversationstreamrequest.ConversationStreamRequestToolTypedDict - ], + 
List[models.ConversationStreamRequestTool], + List[models.ConversationStreamRequestToolTypedDict], ] ] = None, completion_args: OptionalNullable[ - Union[ - models_completionargs.CompletionArgs, - models_completionargs.CompletionArgsTypedDict, - ] + Union[models.CompletionArgs, models.CompletionArgsTypedDict] ] = UNSET, name: OptionalNullable[str] = UNSET, description: OptionalNullable[str] = UNSET, @@ -2184,8 +2160,8 @@ async def start_stream_async( agent_id: OptionalNullable[str] = UNSET, agent_version: OptionalNullable[ Union[ - models_conversationstreamrequest.ConversationStreamRequestAgentVersion, - models_conversationstreamrequest.ConversationStreamRequestAgentVersionTypedDict, + models.ConversationStreamRequestAgentVersion, + models.ConversationStreamRequestAgentVersionTypedDict, ] ] = UNSET, model: OptionalNullable[str] = UNSET, @@ -2278,7 +2254,7 @@ async def start_stream_async( hook_ctx=HookContext( config=self.sdk_configuration, base_url=base_url or "", - operation_id="StartConversationStream", + operation_id="agents_api_v1_conversations_start_stream", oauth2_scopes=None, security_source=get_security_from_env( self.sdk_configuration.security, models.Security @@ -2300,38 +2276,40 @@ async def start_stream_async( if utils.match_response(http_res, "422", "application/json"): http_res_text = await utils.stream_to_text_async(http_res) response_data = unmarshal_json_response( - models.HTTPValidationErrorData, http_res, http_res_text + errors.HTTPValidationErrorData, http_res, http_res_text ) - raise models.HTTPValidationError(response_data, http_res, http_res_text) + raise errors.HTTPValidationError(response_data, http_res, http_res_text) if utils.match_response(http_res, "4XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = await 
utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("Unexpected response received", http_res, http_res_text) + raise errors.SDKError("Unexpected response received", http_res, http_res_text) def append_stream( self, *, conversation_id: str, - inputs: Union[ - models_conversationinputs.ConversationInputs, - models_conversationinputs.ConversationInputsTypedDict, - ], + inputs: Optional[ + Union[models.ConversationInputs, models.ConversationInputsTypedDict] + ] = None, stream: Optional[bool] = True, store: Optional[bool] = True, handoff_execution: Optional[ - models_conversationappendstreamrequest.ConversationAppendStreamRequestHandoffExecution + models.ConversationAppendStreamRequestHandoffExecution ] = "server", completion_args: Optional[ + Union[models.CompletionArgs, models.CompletionArgsTypedDict] + ] = None, + tool_confirmations: OptionalNullable[ Union[ - models_completionargs.CompletionArgs, - models_completionargs.CompletionArgsTypedDict, + List[models.ToolCallConfirmation], + List[models.ToolCallConfirmationTypedDict], ] - ] = None, + ] = UNSET, retries: OptionalNullable[utils.RetryConfig] = UNSET, server_url: Optional[str] = None, timeout_ms: Optional[int] = None, @@ -2347,6 +2325,7 @@ def append_stream( :param store: Whether to store the results into our servers or not. 
:param handoff_execution: :param completion_args: White-listed arguments from the completion API + :param tool_confirmations: :param retries: Override the default retry configuration for this method :param server_url: Override the default server URL for this method :param timeout_ms: Override the default request timeout configuration for this method in milliseconds @@ -2362,16 +2341,22 @@ def append_stream( else: base_url = self._get_url(base_url, url_variables) - request = models.AppendConversationStreamRequest( + request = models.AgentsAPIV1ConversationsAppendStreamRequest( conversation_id=conversation_id, conversation_append_stream_request=models.ConversationAppendStreamRequest( - inputs=utils.get_pydantic_model(inputs, models.ConversationInputs), + inputs=utils.get_pydantic_model( + inputs, Optional[models.ConversationInputs] + ), stream=stream, store=store, handoff_execution=handoff_execution, completion_args=utils.get_pydantic_model( completion_args, Optional[models.CompletionArgs] ), + tool_confirmations=utils.get_pydantic_model( + tool_confirmations, + OptionalNullable[List[models.ToolCallConfirmation]], + ), ), ) @@ -2411,7 +2396,7 @@ def append_stream( hook_ctx=HookContext( config=self.sdk_configuration, base_url=base_url or "", - operation_id="AppendConversationStream", + operation_id="agents_api_v1_conversations_append_stream", oauth2_scopes=None, security_source=get_security_from_env( self.sdk_configuration.security, models.Security @@ -2433,38 +2418,40 @@ def append_stream( if utils.match_response(http_res, "422", "application/json"): http_res_text = utils.stream_to_text(http_res) response_data = unmarshal_json_response( - models.HTTPValidationErrorData, http_res, http_res_text + errors.HTTPValidationErrorData, http_res, http_res_text ) - raise models.HTTPValidationError(response_data, http_res, http_res_text) + raise errors.HTTPValidationError(response_data, http_res, http_res_text) if utils.match_response(http_res, "4XX", "*"): http_res_text = 
utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("Unexpected response received", http_res, http_res_text) + raise errors.SDKError("Unexpected response received", http_res, http_res_text) async def append_stream_async( self, *, conversation_id: str, - inputs: Union[ - models_conversationinputs.ConversationInputs, - models_conversationinputs.ConversationInputsTypedDict, - ], + inputs: Optional[ + Union[models.ConversationInputs, models.ConversationInputsTypedDict] + ] = None, stream: Optional[bool] = True, store: Optional[bool] = True, handoff_execution: Optional[ - models_conversationappendstreamrequest.ConversationAppendStreamRequestHandoffExecution + models.ConversationAppendStreamRequestHandoffExecution ] = "server", completion_args: Optional[ + Union[models.CompletionArgs, models.CompletionArgsTypedDict] + ] = None, + tool_confirmations: OptionalNullable[ Union[ - models_completionargs.CompletionArgs, - models_completionargs.CompletionArgsTypedDict, + List[models.ToolCallConfirmation], + List[models.ToolCallConfirmationTypedDict], ] - ] = None, + ] = UNSET, retries: OptionalNullable[utils.RetryConfig] = UNSET, server_url: Optional[str] = None, timeout_ms: Optional[int] = None, @@ -2480,6 +2467,7 @@ async def append_stream_async( :param store: Whether to store the results into our servers or not. 
:param handoff_execution: :param completion_args: White-listed arguments from the completion API + :param tool_confirmations: :param retries: Override the default retry configuration for this method :param server_url: Override the default server URL for this method :param timeout_ms: Override the default request timeout configuration for this method in milliseconds @@ -2495,16 +2483,22 @@ async def append_stream_async( else: base_url = self._get_url(base_url, url_variables) - request = models.AppendConversationStreamRequest( + request = models.AgentsAPIV1ConversationsAppendStreamRequest( conversation_id=conversation_id, conversation_append_stream_request=models.ConversationAppendStreamRequest( - inputs=utils.get_pydantic_model(inputs, models.ConversationInputs), + inputs=utils.get_pydantic_model( + inputs, Optional[models.ConversationInputs] + ), stream=stream, store=store, handoff_execution=handoff_execution, completion_args=utils.get_pydantic_model( completion_args, Optional[models.CompletionArgs] ), + tool_confirmations=utils.get_pydantic_model( + tool_confirmations, + OptionalNullable[List[models.ToolCallConfirmation]], + ), ), ) @@ -2544,7 +2538,7 @@ async def append_stream_async( hook_ctx=HookContext( config=self.sdk_configuration, base_url=base_url or "", - operation_id="AppendConversationStream", + operation_id="agents_api_v1_conversations_append_stream", oauth2_scopes=None, security_source=get_security_from_env( self.sdk_configuration.security, models.Security @@ -2566,44 +2560,40 @@ async def append_stream_async( if utils.match_response(http_res, "422", "application/json"): http_res_text = await utils.stream_to_text_async(http_res) response_data = unmarshal_json_response( - models.HTTPValidationErrorData, http_res, http_res_text + errors.HTTPValidationErrorData, http_res, http_res_text ) - raise models.HTTPValidationError(response_data, http_res, http_res_text) + raise errors.HTTPValidationError(response_data, http_res, http_res_text) if 
utils.match_response(http_res, "4XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("Unexpected response received", http_res, http_res_text) + raise errors.SDKError("Unexpected response received", http_res, http_res_text) def restart_stream( self, *, conversation_id: str, - inputs: Union[ - models_conversationinputs.ConversationInputs, - models_conversationinputs.ConversationInputsTypedDict, - ], from_entry_id: str, + inputs: Optional[ + Union[models.ConversationInputs, models.ConversationInputsTypedDict] + ] = None, stream: Optional[bool] = True, store: Optional[bool] = True, handoff_execution: Optional[ - models_conversationrestartstreamrequest.ConversationRestartStreamRequestHandoffExecution + models.ConversationRestartStreamRequestHandoffExecution ] = "server", completion_args: Optional[ - Union[ - models_completionargs.CompletionArgs, - models_completionargs.CompletionArgsTypedDict, - ] + Union[models.CompletionArgs, models.CompletionArgsTypedDict] ] = None, metadata: OptionalNullable[Dict[str, Any]] = UNSET, agent_version: OptionalNullable[ Union[ - models_conversationrestartstreamrequest.ConversationRestartStreamRequestAgentVersion, - models_conversationrestartstreamrequest.ConversationRestartStreamRequestAgentVersionTypedDict, + models.ConversationRestartStreamRequestAgentVersion, + models.ConversationRestartStreamRequestAgentVersionTypedDict, ] ] = UNSET, retries: OptionalNullable[utils.RetryConfig] = UNSET, @@ -2616,8 +2606,8 @@ def restart_stream( Given a conversation_id 
and an id, recreate a conversation from this point and run completion. A new conversation is returned with the new entries returned. :param conversation_id: ID of the original conversation which is being restarted. - :param inputs: :param from_entry_id: + :param inputs: :param stream: :param store: Whether to store the results into our servers or not. :param handoff_execution: @@ -2639,10 +2629,12 @@ def restart_stream( else: base_url = self._get_url(base_url, url_variables) - request = models.RestartConversationStreamRequest( + request = models.AgentsAPIV1ConversationsRestartStreamRequest( conversation_id=conversation_id, conversation_restart_stream_request=models.ConversationRestartStreamRequest( - inputs=utils.get_pydantic_model(inputs, models.ConversationInputs), + inputs=utils.get_pydantic_model( + inputs, Optional[models.ConversationInputs] + ), stream=stream, store=store, handoff_execution=handoff_execution, @@ -2691,7 +2683,7 @@ def restart_stream( hook_ctx=HookContext( config=self.sdk_configuration, base_url=base_url or "", - operation_id="RestartConversationStream", + operation_id="agents_api_v1_conversations_restart_stream", oauth2_scopes=None, security_source=get_security_from_env( self.sdk_configuration.security, models.Security @@ -2713,44 +2705,40 @@ def restart_stream( if utils.match_response(http_res, "422", "application/json"): http_res_text = utils.stream_to_text(http_res) response_data = unmarshal_json_response( - models.HTTPValidationErrorData, http_res, http_res_text + errors.HTTPValidationErrorData, http_res, http_res_text ) - raise models.HTTPValidationError(response_data, http_res, http_res_text) + raise errors.HTTPValidationError(response_data, http_res, http_res_text) if utils.match_response(http_res, "4XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, 
"5XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("Unexpected response received", http_res, http_res_text) + raise errors.SDKError("Unexpected response received", http_res, http_res_text) async def restart_stream_async( self, *, conversation_id: str, - inputs: Union[ - models_conversationinputs.ConversationInputs, - models_conversationinputs.ConversationInputsTypedDict, - ], from_entry_id: str, + inputs: Optional[ + Union[models.ConversationInputs, models.ConversationInputsTypedDict] + ] = None, stream: Optional[bool] = True, store: Optional[bool] = True, handoff_execution: Optional[ - models_conversationrestartstreamrequest.ConversationRestartStreamRequestHandoffExecution + models.ConversationRestartStreamRequestHandoffExecution ] = "server", completion_args: Optional[ - Union[ - models_completionargs.CompletionArgs, - models_completionargs.CompletionArgsTypedDict, - ] + Union[models.CompletionArgs, models.CompletionArgsTypedDict] ] = None, metadata: OptionalNullable[Dict[str, Any]] = UNSET, agent_version: OptionalNullable[ Union[ - models_conversationrestartstreamrequest.ConversationRestartStreamRequestAgentVersion, - models_conversationrestartstreamrequest.ConversationRestartStreamRequestAgentVersionTypedDict, + models.ConversationRestartStreamRequestAgentVersion, + models.ConversationRestartStreamRequestAgentVersionTypedDict, ] ] = UNSET, retries: OptionalNullable[utils.RetryConfig] = UNSET, @@ -2763,8 +2751,8 @@ async def restart_stream_async( Given a conversation_id and an id, recreate a conversation from this point and run completion. A new conversation is returned with the new entries returned. :param conversation_id: ID of the original conversation which is being restarted. 
- :param inputs: :param from_entry_id: + :param inputs: :param stream: :param store: Whether to store the results into our servers or not. :param handoff_execution: @@ -2786,10 +2774,12 @@ async def restart_stream_async( else: base_url = self._get_url(base_url, url_variables) - request = models.RestartConversationStreamRequest( + request = models.AgentsAPIV1ConversationsRestartStreamRequest( conversation_id=conversation_id, conversation_restart_stream_request=models.ConversationRestartStreamRequest( - inputs=utils.get_pydantic_model(inputs, models.ConversationInputs), + inputs=utils.get_pydantic_model( + inputs, Optional[models.ConversationInputs] + ), stream=stream, store=store, handoff_execution=handoff_execution, @@ -2838,7 +2828,7 @@ async def restart_stream_async( hook_ctx=HookContext( config=self.sdk_configuration, base_url=base_url or "", - operation_id="RestartConversationStream", + operation_id="agents_api_v1_conversations_restart_stream", oauth2_scopes=None, security_source=get_security_from_env( self.sdk_configuration.security, models.Security @@ -2860,15 +2850,15 @@ async def restart_stream_async( if utils.match_response(http_res, "422", "application/json"): http_res_text = await utils.stream_to_text_async(http_res) response_data = unmarshal_json_response( - models.HTTPValidationErrorData, http_res, http_res_text + errors.HTTPValidationErrorData, http_res, http_res_text ) - raise models.HTTPValidationError(response_data, http_res, http_res_text) + raise errors.HTTPValidationError(response_data, http_res, http_res_text) if utils.match_response(http_res, "4XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise 
errors.SDKError("API error occurred", http_res, http_res_text) http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("Unexpected response received", http_res, http_res_text) + raise errors.SDKError("Unexpected response received", http_res, http_res_text) diff --git a/src/mistralai/client/documents.py b/src/mistralai/client/documents.py index c78f2944..b3130364 100644 --- a/src/mistralai/client/documents.py +++ b/src/mistralai/client/documents.py @@ -2,12 +2,8 @@ # @generated-id: bcc17286c31c from .basesdk import BaseSDK -from mistralai.client import models, utils +from mistralai.client import errors, models, utils from mistralai.client._hooks import HookContext -from mistralai.client.models import ( - documentupdatein as models_documentupdatein, - file as models_file, -) from mistralai.client.types import OptionalNullable, UNSET from mistralai.client.utils import get_security_from_env from mistralai.client.utils.unmarshal_json_response import unmarshal_json_response @@ -31,7 +27,7 @@ def list( server_url: Optional[str] = None, timeout_ms: Optional[int] = None, http_headers: Optional[Mapping[str, str]] = None, - ) -> models.ListDocumentOut: + ) -> models.ListDocumentsResponse: r"""List documents in a given library. Given a library, lists the document that have been uploaded to that library. 
@@ -58,7 +54,7 @@ def list( else: base_url = self._get_url(base_url, url_variables) - request = models.ListDocumentsRequest( + request = models.LibrariesDocumentsListV1Request( library_id=library_id, search=search, page_size=page_size, @@ -97,7 +93,7 @@ def list( hook_ctx=HookContext( config=self.sdk_configuration, base_url=base_url or "", - operation_id="ListDocuments", + operation_id="libraries_documents_list_v1", oauth2_scopes=None, security_source=get_security_from_env( self.sdk_configuration.security, models.Security @@ -110,20 +106,20 @@ def list( response_data: Any = None if utils.match_response(http_res, "200", "application/json"): - return unmarshal_json_response(models.ListDocumentOut, http_res) + return unmarshal_json_response(models.ListDocumentsResponse, http_res) if utils.match_response(http_res, "422", "application/json"): response_data = unmarshal_json_response( - models.HTTPValidationErrorData, http_res + errors.HTTPValidationErrorData, http_res ) - raise models.HTTPValidationError(response_data, http_res) + raise errors.HTTPValidationError(response_data, http_res) if utils.match_response(http_res, "4XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise models.SDKError("Unexpected response received", http_res) + raise errors.SDKError("Unexpected response received", http_res) async def list_async( self, @@ -139,7 +135,7 @@ async def list_async( server_url: Optional[str] = None, timeout_ms: Optional[int] = None, http_headers: Optional[Mapping[str, str]] = None, - ) -> models.ListDocumentOut: + ) -> models.ListDocumentsResponse: r"""List documents in a given library. 
Given a library, lists the document that have been uploaded to that library. @@ -166,7 +162,7 @@ async def list_async( else: base_url = self._get_url(base_url, url_variables) - request = models.ListDocumentsRequest( + request = models.LibrariesDocumentsListV1Request( library_id=library_id, search=search, page_size=page_size, @@ -205,7 +201,7 @@ async def list_async( hook_ctx=HookContext( config=self.sdk_configuration, base_url=base_url or "", - operation_id="ListDocuments", + operation_id="libraries_documents_list_v1", oauth2_scopes=None, security_source=get_security_from_env( self.sdk_configuration.security, models.Security @@ -218,31 +214,31 @@ async def list_async( response_data: Any = None if utils.match_response(http_res, "200", "application/json"): - return unmarshal_json_response(models.ListDocumentOut, http_res) + return unmarshal_json_response(models.ListDocumentsResponse, http_res) if utils.match_response(http_res, "422", "application/json"): response_data = unmarshal_json_response( - models.HTTPValidationErrorData, http_res + errors.HTTPValidationErrorData, http_res ) - raise models.HTTPValidationError(response_data, http_res) + raise errors.HTTPValidationError(response_data, http_res) if utils.match_response(http_res, "4XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise models.SDKError("Unexpected response received", http_res) + raise errors.SDKError("Unexpected response received", http_res) def upload( self, *, library_id: str, - file: Union[models_file.File, models_file.FileTypedDict], + file: Union[models.File, models.FileTypedDict], 
retries: OptionalNullable[utils.RetryConfig] = UNSET, server_url: Optional[str] = None, timeout_ms: Optional[int] = None, http_headers: Optional[Mapping[str, str]] = None, - ) -> models.DocumentOut: + ) -> models.Document: r"""Upload a new document. Given a library, upload a new document to that library. It is queued for processing, it status will change it has been processed. The processing has to be completed in order be discoverable for the library search @@ -272,7 +268,7 @@ def upload( else: base_url = self._get_url(base_url, url_variables) - request = models.UploadDocumentRequest( + request = models.LibrariesDocumentsUploadV1Request( library_id=library_id, request_body=models.DocumentUpload( file=utils.get_pydantic_model(file, models.File), @@ -311,7 +307,7 @@ def upload( hook_ctx=HookContext( config=self.sdk_configuration, base_url=base_url or "", - operation_id="UploadDocument", + operation_id="libraries_documents_upload_v1", oauth2_scopes=None, security_source=get_security_from_env( self.sdk_configuration.security, models.Security @@ -324,31 +320,31 @@ def upload( response_data: Any = None if utils.match_response(http_res, ["200", "201"], "application/json"): - return unmarshal_json_response(models.DocumentOut, http_res) + return unmarshal_json_response(models.Document, http_res) if utils.match_response(http_res, "422", "application/json"): response_data = unmarshal_json_response( - models.HTTPValidationErrorData, http_res + errors.HTTPValidationErrorData, http_res ) - raise models.HTTPValidationError(response_data, http_res) + raise errors.HTTPValidationError(response_data, http_res) if utils.match_response(http_res, "4XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", 
http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise models.SDKError("Unexpected response received", http_res) + raise errors.SDKError("Unexpected response received", http_res) async def upload_async( self, *, library_id: str, - file: Union[models_file.File, models_file.FileTypedDict], + file: Union[models.File, models.FileTypedDict], retries: OptionalNullable[utils.RetryConfig] = UNSET, server_url: Optional[str] = None, timeout_ms: Optional[int] = None, http_headers: Optional[Mapping[str, str]] = None, - ) -> models.DocumentOut: + ) -> models.Document: r"""Upload a new document. Given a library, upload a new document to that library. It is queued for processing, it status will change it has been processed. The processing has to be completed in order be discoverable for the library search @@ -378,7 +374,7 @@ async def upload_async( else: base_url = self._get_url(base_url, url_variables) - request = models.UploadDocumentRequest( + request = models.LibrariesDocumentsUploadV1Request( library_id=library_id, request_body=models.DocumentUpload( file=utils.get_pydantic_model(file, models.File), @@ -417,7 +413,7 @@ async def upload_async( hook_ctx=HookContext( config=self.sdk_configuration, base_url=base_url or "", - operation_id="UploadDocument", + operation_id="libraries_documents_upload_v1", oauth2_scopes=None, security_source=get_security_from_env( self.sdk_configuration.security, models.Security @@ -430,20 +426,20 @@ async def upload_async( response_data: Any = None if utils.match_response(http_res, ["200", "201"], "application/json"): - return unmarshal_json_response(models.DocumentOut, http_res) + return unmarshal_json_response(models.Document, http_res) if utils.match_response(http_res, "422", "application/json"): response_data = unmarshal_json_response( - models.HTTPValidationErrorData, http_res + errors.HTTPValidationErrorData, http_res ) - raise models.HTTPValidationError(response_data, http_res) + raise 
errors.HTTPValidationError(response_data, http_res) if utils.match_response(http_res, "4XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise models.SDKError("Unexpected response received", http_res) + raise errors.SDKError("Unexpected response received", http_res) def get( self, @@ -454,7 +450,7 @@ def get( server_url: Optional[str] = None, timeout_ms: Optional[int] = None, http_headers: Optional[Mapping[str, str]] = None, - ) -> models.DocumentOut: + ) -> models.Document: r"""Retrieve the metadata of a specific document. Given a library and a document in this library, you can retrieve the metadata of that document. 
@@ -476,7 +472,7 @@ def get( else: base_url = self._get_url(base_url, url_variables) - request = models.GetDocumentRequest( + request = models.LibrariesDocumentsGetV1Request( library_id=library_id, document_id=document_id, ) @@ -510,7 +506,7 @@ def get( hook_ctx=HookContext( config=self.sdk_configuration, base_url=base_url or "", - operation_id="GetDocument", + operation_id="libraries_documents_get_v1", oauth2_scopes=None, security_source=get_security_from_env( self.sdk_configuration.security, models.Security @@ -523,20 +519,20 @@ def get( response_data: Any = None if utils.match_response(http_res, "200", "application/json"): - return unmarshal_json_response(models.DocumentOut, http_res) + return unmarshal_json_response(models.Document, http_res) if utils.match_response(http_res, "422", "application/json"): response_data = unmarshal_json_response( - models.HTTPValidationErrorData, http_res + errors.HTTPValidationErrorData, http_res ) - raise models.HTTPValidationError(response_data, http_res) + raise errors.HTTPValidationError(response_data, http_res) if utils.match_response(http_res, "4XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise models.SDKError("Unexpected response received", http_res) + raise errors.SDKError("Unexpected response received", http_res) async def get_async( self, @@ -547,7 +543,7 @@ async def get_async( server_url: Optional[str] = None, timeout_ms: Optional[int] = None, http_headers: Optional[Mapping[str, str]] = None, - ) -> models.DocumentOut: + ) -> models.Document: r"""Retrieve the metadata of a specific document. 
Given a library and a document in this library, you can retrieve the metadata of that document. @@ -569,7 +565,7 @@ async def get_async( else: base_url = self._get_url(base_url, url_variables) - request = models.GetDocumentRequest( + request = models.LibrariesDocumentsGetV1Request( library_id=library_id, document_id=document_id, ) @@ -603,7 +599,7 @@ async def get_async( hook_ctx=HookContext( config=self.sdk_configuration, base_url=base_url or "", - operation_id="GetDocument", + operation_id="libraries_documents_get_v1", oauth2_scopes=None, security_source=get_security_from_env( self.sdk_configuration.security, models.Security @@ -616,20 +612,20 @@ async def get_async( response_data: Any = None if utils.match_response(http_res, "200", "application/json"): - return unmarshal_json_response(models.DocumentOut, http_res) + return unmarshal_json_response(models.Document, http_res) if utils.match_response(http_res, "422", "application/json"): response_data = unmarshal_json_response( - models.HTTPValidationErrorData, http_res + errors.HTTPValidationErrorData, http_res ) - raise models.HTTPValidationError(response_data, http_res) + raise errors.HTTPValidationError(response_data, http_res) if utils.match_response(http_res, "4XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise models.SDKError("Unexpected response received", http_res) + raise errors.SDKError("Unexpected response received", http_res) def update( self, @@ -638,16 +634,13 @@ def update( document_id: str, name: OptionalNullable[str] = UNSET, attributes: OptionalNullable[ - Union[ - Dict[str, 
models_documentupdatein.Attributes], - Dict[str, models_documentupdatein.AttributesTypedDict], - ] + Union[Dict[str, models.Attributes], Dict[str, models.AttributesTypedDict]] ] = UNSET, retries: OptionalNullable[utils.RetryConfig] = UNSET, server_url: Optional[str] = None, timeout_ms: Optional[int] = None, http_headers: Optional[Mapping[str, str]] = None, - ) -> models.DocumentOut: + ) -> models.Document: r"""Update the metadata of a specific document. Given a library and a document in that library, update the name of that document. @@ -671,10 +664,10 @@ def update( else: base_url = self._get_url(base_url, url_variables) - request = models.UpdateDocumentRequest( + request = models.LibrariesDocumentsUpdateV1Request( library_id=library_id, document_id=document_id, - document_update_in=models.DocumentUpdateIn( + update_document_request=models.UpdateDocumentRequest( name=name, attributes=attributes, ), @@ -694,11 +687,11 @@ def update( http_headers=http_headers, security=self.sdk_configuration.security, get_serialized_body=lambda: utils.serialize_request_body( - request.document_update_in, + request.update_document_request, False, False, "json", - models.DocumentUpdateIn, + models.UpdateDocumentRequest, ), allow_empty_value=None, timeout_ms=timeout_ms, @@ -716,7 +709,7 @@ def update( hook_ctx=HookContext( config=self.sdk_configuration, base_url=base_url or "", - operation_id="UpdateDocument", + operation_id="libraries_documents_update_v1", oauth2_scopes=None, security_source=get_security_from_env( self.sdk_configuration.security, models.Security @@ -729,20 +722,20 @@ def update( response_data: Any = None if utils.match_response(http_res, "200", "application/json"): - return unmarshal_json_response(models.DocumentOut, http_res) + return unmarshal_json_response(models.Document, http_res) if utils.match_response(http_res, "422", "application/json"): response_data = unmarshal_json_response( - models.HTTPValidationErrorData, http_res + errors.HTTPValidationErrorData, 
http_res ) - raise models.HTTPValidationError(response_data, http_res) + raise errors.HTTPValidationError(response_data, http_res) if utils.match_response(http_res, "4XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise models.SDKError("Unexpected response received", http_res) + raise errors.SDKError("Unexpected response received", http_res) async def update_async( self, @@ -751,16 +744,13 @@ async def update_async( document_id: str, name: OptionalNullable[str] = UNSET, attributes: OptionalNullable[ - Union[ - Dict[str, models_documentupdatein.Attributes], - Dict[str, models_documentupdatein.AttributesTypedDict], - ] + Union[Dict[str, models.Attributes], Dict[str, models.AttributesTypedDict]] ] = UNSET, retries: OptionalNullable[utils.RetryConfig] = UNSET, server_url: Optional[str] = None, timeout_ms: Optional[int] = None, http_headers: Optional[Mapping[str, str]] = None, - ) -> models.DocumentOut: + ) -> models.Document: r"""Update the metadata of a specific document. Given a library and a document in that library, update the name of that document. 
@@ -784,10 +774,10 @@ async def update_async( else: base_url = self._get_url(base_url, url_variables) - request = models.UpdateDocumentRequest( + request = models.LibrariesDocumentsUpdateV1Request( library_id=library_id, document_id=document_id, - document_update_in=models.DocumentUpdateIn( + update_document_request=models.UpdateDocumentRequest( name=name, attributes=attributes, ), @@ -807,11 +797,11 @@ async def update_async( http_headers=http_headers, security=self.sdk_configuration.security, get_serialized_body=lambda: utils.serialize_request_body( - request.document_update_in, + request.update_document_request, False, False, "json", - models.DocumentUpdateIn, + models.UpdateDocumentRequest, ), allow_empty_value=None, timeout_ms=timeout_ms, @@ -829,7 +819,7 @@ async def update_async( hook_ctx=HookContext( config=self.sdk_configuration, base_url=base_url or "", - operation_id="UpdateDocument", + operation_id="libraries_documents_update_v1", oauth2_scopes=None, security_source=get_security_from_env( self.sdk_configuration.security, models.Security @@ -842,20 +832,20 @@ async def update_async( response_data: Any = None if utils.match_response(http_res, "200", "application/json"): - return unmarshal_json_response(models.DocumentOut, http_res) + return unmarshal_json_response(models.Document, http_res) if utils.match_response(http_res, "422", "application/json"): response_data = unmarshal_json_response( - models.HTTPValidationErrorData, http_res + errors.HTTPValidationErrorData, http_res ) - raise models.HTTPValidationError(response_data, http_res) + raise errors.HTTPValidationError(response_data, http_res) if utils.match_response(http_res, "4XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise 
models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise models.SDKError("Unexpected response received", http_res) + raise errors.SDKError("Unexpected response received", http_res) def delete( self, @@ -888,7 +878,7 @@ def delete( else: base_url = self._get_url(base_url, url_variables) - request = models.DeleteDocumentRequest( + request = models.LibrariesDocumentsDeleteV1Request( library_id=library_id, document_id=document_id, ) @@ -922,7 +912,7 @@ def delete( hook_ctx=HookContext( config=self.sdk_configuration, base_url=base_url or "", - operation_id="DeleteDocument", + operation_id="libraries_documents_delete_v1", oauth2_scopes=None, security_source=get_security_from_env( self.sdk_configuration.security, models.Security @@ -938,17 +928,17 @@ def delete( return if utils.match_response(http_res, "422", "application/json"): response_data = unmarshal_json_response( - models.HTTPValidationErrorData, http_res + errors.HTTPValidationErrorData, http_res ) - raise models.HTTPValidationError(response_data, http_res) + raise errors.HTTPValidationError(response_data, http_res) if utils.match_response(http_res, "4XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise models.SDKError("Unexpected response received", http_res) + raise errors.SDKError("Unexpected response received", http_res) async def delete_async( self, @@ -981,7 +971,7 @@ async def delete_async( else: base_url = self._get_url(base_url, url_variables) - request = models.DeleteDocumentRequest( + request = models.LibrariesDocumentsDeleteV1Request( 
library_id=library_id, document_id=document_id, ) @@ -1015,7 +1005,7 @@ async def delete_async( hook_ctx=HookContext( config=self.sdk_configuration, base_url=base_url or "", - operation_id="DeleteDocument", + operation_id="libraries_documents_delete_v1", oauth2_scopes=None, security_source=get_security_from_env( self.sdk_configuration.security, models.Security @@ -1031,17 +1021,17 @@ async def delete_async( return if utils.match_response(http_res, "422", "application/json"): response_data = unmarshal_json_response( - models.HTTPValidationErrorData, http_res + errors.HTTPValidationErrorData, http_res ) - raise models.HTTPValidationError(response_data, http_res) + raise errors.HTTPValidationError(response_data, http_res) if utils.match_response(http_res, "4XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise models.SDKError("Unexpected response received", http_res) + raise errors.SDKError("Unexpected response received", http_res) def text_content( self, @@ -1074,7 +1064,7 @@ def text_content( else: base_url = self._get_url(base_url, url_variables) - request = models.GetDocumentTextContentRequest( + request = models.LibrariesDocumentsGetTextContentV1Request( library_id=library_id, document_id=document_id, ) @@ -1108,7 +1098,7 @@ def text_content( hook_ctx=HookContext( config=self.sdk_configuration, base_url=base_url or "", - operation_id="GetDocumentTextContent", + operation_id="libraries_documents_get_text_content_v1", oauth2_scopes=None, security_source=get_security_from_env( self.sdk_configuration.security, models.Security @@ -1124,17 +1114,17 @@ def 
text_content( return unmarshal_json_response(models.DocumentTextContent, http_res) if utils.match_response(http_res, "422", "application/json"): response_data = unmarshal_json_response( - models.HTTPValidationErrorData, http_res + errors.HTTPValidationErrorData, http_res ) - raise models.HTTPValidationError(response_data, http_res) + raise errors.HTTPValidationError(response_data, http_res) if utils.match_response(http_res, "4XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise models.SDKError("Unexpected response received", http_res) + raise errors.SDKError("Unexpected response received", http_res) async def text_content_async( self, @@ -1167,7 +1157,7 @@ async def text_content_async( else: base_url = self._get_url(base_url, url_variables) - request = models.GetDocumentTextContentRequest( + request = models.LibrariesDocumentsGetTextContentV1Request( library_id=library_id, document_id=document_id, ) @@ -1201,7 +1191,7 @@ async def text_content_async( hook_ctx=HookContext( config=self.sdk_configuration, base_url=base_url or "", - operation_id="GetDocumentTextContent", + operation_id="libraries_documents_get_text_content_v1", oauth2_scopes=None, security_source=get_security_from_env( self.sdk_configuration.security, models.Security @@ -1217,17 +1207,17 @@ async def text_content_async( return unmarshal_json_response(models.DocumentTextContent, http_res) if utils.match_response(http_res, "422", "application/json"): response_data = unmarshal_json_response( - models.HTTPValidationErrorData, http_res + errors.HTTPValidationErrorData, http_res ) - raise 
models.HTTPValidationError(response_data, http_res) + raise errors.HTTPValidationError(response_data, http_res) if utils.match_response(http_res, "4XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise models.SDKError("Unexpected response received", http_res) + raise errors.SDKError("Unexpected response received", http_res) def status( self, @@ -1260,7 +1250,7 @@ def status( else: base_url = self._get_url(base_url, url_variables) - request = models.GetDocumentStatusRequest( + request = models.LibrariesDocumentsGetStatusV1Request( library_id=library_id, document_id=document_id, ) @@ -1294,7 +1284,7 @@ def status( hook_ctx=HookContext( config=self.sdk_configuration, base_url=base_url or "", - operation_id="GetDocumentStatus", + operation_id="libraries_documents_get_status_v1", oauth2_scopes=None, security_source=get_security_from_env( self.sdk_configuration.security, models.Security @@ -1310,17 +1300,17 @@ def status( return unmarshal_json_response(models.ProcessingStatusOut, http_res) if utils.match_response(http_res, "422", "application/json"): response_data = unmarshal_json_response( - models.HTTPValidationErrorData, http_res + errors.HTTPValidationErrorData, http_res ) - raise models.HTTPValidationError(response_data, http_res) + raise errors.HTTPValidationError(response_data, http_res) if utils.match_response(http_res, "4XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, 
"5XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise models.SDKError("Unexpected response received", http_res) + raise errors.SDKError("Unexpected response received", http_res) async def status_async( self, @@ -1353,7 +1343,7 @@ async def status_async( else: base_url = self._get_url(base_url, url_variables) - request = models.GetDocumentStatusRequest( + request = models.LibrariesDocumentsGetStatusV1Request( library_id=library_id, document_id=document_id, ) @@ -1387,7 +1377,7 @@ async def status_async( hook_ctx=HookContext( config=self.sdk_configuration, base_url=base_url or "", - operation_id="GetDocumentStatus", + operation_id="libraries_documents_get_status_v1", oauth2_scopes=None, security_source=get_security_from_env( self.sdk_configuration.security, models.Security @@ -1403,17 +1393,17 @@ async def status_async( return unmarshal_json_response(models.ProcessingStatusOut, http_res) if utils.match_response(http_res, "422", "application/json"): response_data = unmarshal_json_response( - models.HTTPValidationErrorData, http_res + errors.HTTPValidationErrorData, http_res ) - raise models.HTTPValidationError(response_data, http_res) + raise errors.HTTPValidationError(response_data, http_res) if utils.match_response(http_res, "4XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise models.SDKError("Unexpected response received", http_res) + raise errors.SDKError("Unexpected response received", http_res) def 
get_signed_url( self, @@ -1446,7 +1436,7 @@ def get_signed_url( else: base_url = self._get_url(base_url, url_variables) - request = models.GetDocumentSignedURLRequest( + request = models.LibrariesDocumentsGetSignedURLV1Request( library_id=library_id, document_id=document_id, ) @@ -1480,7 +1470,7 @@ def get_signed_url( hook_ctx=HookContext( config=self.sdk_configuration, base_url=base_url or "", - operation_id="GetDocumentSignedUrl", + operation_id="libraries_documents_get_signed_url_v1", oauth2_scopes=None, security_source=get_security_from_env( self.sdk_configuration.security, models.Security @@ -1496,17 +1486,17 @@ def get_signed_url( return unmarshal_json_response(str, http_res) if utils.match_response(http_res, "422", "application/json"): response_data = unmarshal_json_response( - models.HTTPValidationErrorData, http_res + errors.HTTPValidationErrorData, http_res ) - raise models.HTTPValidationError(response_data, http_res) + raise errors.HTTPValidationError(response_data, http_res) if utils.match_response(http_res, "4XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise models.SDKError("Unexpected response received", http_res) + raise errors.SDKError("Unexpected response received", http_res) async def get_signed_url_async( self, @@ -1539,7 +1529,7 @@ async def get_signed_url_async( else: base_url = self._get_url(base_url, url_variables) - request = models.GetDocumentSignedURLRequest( + request = models.LibrariesDocumentsGetSignedURLV1Request( library_id=library_id, document_id=document_id, ) @@ -1573,7 +1563,7 @@ async def get_signed_url_async( hook_ctx=HookContext( 
config=self.sdk_configuration, base_url=base_url or "", - operation_id="GetDocumentSignedUrl", + operation_id="libraries_documents_get_signed_url_v1", oauth2_scopes=None, security_source=get_security_from_env( self.sdk_configuration.security, models.Security @@ -1589,17 +1579,17 @@ async def get_signed_url_async( return unmarshal_json_response(str, http_res) if utils.match_response(http_res, "422", "application/json"): response_data = unmarshal_json_response( - models.HTTPValidationErrorData, http_res + errors.HTTPValidationErrorData, http_res ) - raise models.HTTPValidationError(response_data, http_res) + raise errors.HTTPValidationError(response_data, http_res) if utils.match_response(http_res, "4XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise models.SDKError("Unexpected response received", http_res) + raise errors.SDKError("Unexpected response received", http_res) def extracted_text_signed_url( self, @@ -1632,7 +1622,7 @@ def extracted_text_signed_url( else: base_url = self._get_url(base_url, url_variables) - request = models.GetDocumentExtractedTextSignedURLRequest( + request = models.LibrariesDocumentsGetExtractedTextSignedURLV1Request( library_id=library_id, document_id=document_id, ) @@ -1666,7 +1656,7 @@ def extracted_text_signed_url( hook_ctx=HookContext( config=self.sdk_configuration, base_url=base_url or "", - operation_id="GetDocumentExtractedTextSignedUrl", + operation_id="libraries_documents_get_extracted_text_signed_url_v1", oauth2_scopes=None, security_source=get_security_from_env( self.sdk_configuration.security, models.Security @@ 
-1682,17 +1672,17 @@ def extracted_text_signed_url( return unmarshal_json_response(str, http_res) if utils.match_response(http_res, "422", "application/json"): response_data = unmarshal_json_response( - models.HTTPValidationErrorData, http_res + errors.HTTPValidationErrorData, http_res ) - raise models.HTTPValidationError(response_data, http_res) + raise errors.HTTPValidationError(response_data, http_res) if utils.match_response(http_res, "4XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise models.SDKError("Unexpected response received", http_res) + raise errors.SDKError("Unexpected response received", http_res) async def extracted_text_signed_url_async( self, @@ -1725,7 +1715,7 @@ async def extracted_text_signed_url_async( else: base_url = self._get_url(base_url, url_variables) - request = models.GetDocumentExtractedTextSignedURLRequest( + request = models.LibrariesDocumentsGetExtractedTextSignedURLV1Request( library_id=library_id, document_id=document_id, ) @@ -1759,7 +1749,7 @@ async def extracted_text_signed_url_async( hook_ctx=HookContext( config=self.sdk_configuration, base_url=base_url or "", - operation_id="GetDocumentExtractedTextSignedUrl", + operation_id="libraries_documents_get_extracted_text_signed_url_v1", oauth2_scopes=None, security_source=get_security_from_env( self.sdk_configuration.security, models.Security @@ -1775,17 +1765,17 @@ async def extracted_text_signed_url_async( return unmarshal_json_response(str, http_res) if utils.match_response(http_res, "422", "application/json"): response_data = unmarshal_json_response( - models.HTTPValidationErrorData, http_res 
+ errors.HTTPValidationErrorData, http_res ) - raise models.HTTPValidationError(response_data, http_res) + raise errors.HTTPValidationError(response_data, http_res) if utils.match_response(http_res, "4XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise models.SDKError("Unexpected response received", http_res) + raise errors.SDKError("Unexpected response received", http_res) def reprocess( self, @@ -1818,7 +1808,7 @@ def reprocess( else: base_url = self._get_url(base_url, url_variables) - request = models.ReprocessDocumentRequest( + request = models.LibrariesDocumentsReprocessV1Request( library_id=library_id, document_id=document_id, ) @@ -1852,7 +1842,7 @@ def reprocess( hook_ctx=HookContext( config=self.sdk_configuration, base_url=base_url or "", - operation_id="ReprocessDocument", + operation_id="libraries_documents_reprocess_v1", oauth2_scopes=None, security_source=get_security_from_env( self.sdk_configuration.security, models.Security @@ -1868,17 +1858,17 @@ def reprocess( return if utils.match_response(http_res, "422", "application/json"): response_data = unmarshal_json_response( - models.HTTPValidationErrorData, http_res + errors.HTTPValidationErrorData, http_res ) - raise models.HTTPValidationError(response_data, http_res) + raise errors.HTTPValidationError(response_data, http_res) if utils.match_response(http_res, "4XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, 
"5XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise models.SDKError("Unexpected response received", http_res) + raise errors.SDKError("Unexpected response received", http_res) async def reprocess_async( self, @@ -1911,7 +1901,7 @@ async def reprocess_async( else: base_url = self._get_url(base_url, url_variables) - request = models.ReprocessDocumentRequest( + request = models.LibrariesDocumentsReprocessV1Request( library_id=library_id, document_id=document_id, ) @@ -1945,7 +1935,7 @@ async def reprocess_async( hook_ctx=HookContext( config=self.sdk_configuration, base_url=base_url or "", - operation_id="ReprocessDocument", + operation_id="libraries_documents_reprocess_v1", oauth2_scopes=None, security_source=get_security_from_env( self.sdk_configuration.security, models.Security @@ -1961,14 +1951,14 @@ async def reprocess_async( return if utils.match_response(http_res, "422", "application/json"): response_data = unmarshal_json_response( - models.HTTPValidationErrorData, http_res + errors.HTTPValidationErrorData, http_res ) - raise models.HTTPValidationError(response_data, http_res) + raise errors.HTTPValidationError(response_data, http_res) if utils.match_response(http_res, "4XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise models.SDKError("Unexpected response received", http_res) + raise errors.SDKError("Unexpected response received", http_res) diff --git a/src/mistralai/client/embeddings.py 
b/src/mistralai/client/embeddings.py index 4a056baa..5f9d3b9c 100644 --- a/src/mistralai/client/embeddings.py +++ b/src/mistralai/client/embeddings.py @@ -2,13 +2,8 @@ # @generated-id: f9c17258207e from .basesdk import BaseSDK -from mistralai.client import models, utils +from mistralai.client import errors, models, utils from mistralai.client._hooks import HookContext -from mistralai.client.models import ( - embeddingdtype as models_embeddingdtype, - embeddingrequest as models_embeddingrequest, - encodingformat as models_encodingformat, -) from mistralai.client.types import OptionalNullable, UNSET from mistralai.client.utils import get_security_from_env from mistralai.client.utils.unmarshal_json_response import unmarshal_json_response @@ -23,13 +18,12 @@ def create( *, model: str, inputs: Union[ - models_embeddingrequest.EmbeddingRequestInputs, - models_embeddingrequest.EmbeddingRequestInputsTypedDict, + models.EmbeddingRequestInputs, models.EmbeddingRequestInputsTypedDict ], metadata: OptionalNullable[Dict[str, Any]] = UNSET, output_dimension: OptionalNullable[int] = UNSET, - output_dtype: Optional[models_embeddingdtype.EmbeddingDtype] = None, - encoding_format: Optional[models_encodingformat.EncodingFormat] = None, + output_dtype: Optional[models.EmbeddingDtype] = None, + encoding_format: Optional[models.EncodingFormat] = None, retries: OptionalNullable[utils.RetryConfig] = UNSET, server_url: Optional[str] = None, timeout_ms: Optional[int] = None, @@ -117,30 +111,29 @@ def create( return unmarshal_json_response(models.EmbeddingResponse, http_res) if utils.match_response(http_res, "422", "application/json"): response_data = unmarshal_json_response( - models.HTTPValidationErrorData, http_res + errors.HTTPValidationErrorData, http_res ) - raise models.HTTPValidationError(response_data, http_res) + raise errors.HTTPValidationError(response_data, http_res) if utils.match_response(http_res, "4XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise 
models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise models.SDKError("Unexpected response received", http_res) + raise errors.SDKError("Unexpected response received", http_res) async def create_async( self, *, model: str, inputs: Union[ - models_embeddingrequest.EmbeddingRequestInputs, - models_embeddingrequest.EmbeddingRequestInputsTypedDict, + models.EmbeddingRequestInputs, models.EmbeddingRequestInputsTypedDict ], metadata: OptionalNullable[Dict[str, Any]] = UNSET, output_dimension: OptionalNullable[int] = UNSET, - output_dtype: Optional[models_embeddingdtype.EmbeddingDtype] = None, - encoding_format: Optional[models_encodingformat.EncodingFormat] = None, + output_dtype: Optional[models.EmbeddingDtype] = None, + encoding_format: Optional[models.EncodingFormat] = None, retries: OptionalNullable[utils.RetryConfig] = UNSET, server_url: Optional[str] = None, timeout_ms: Optional[int] = None, @@ -228,14 +221,14 @@ async def create_async( return unmarshal_json_response(models.EmbeddingResponse, http_res) if utils.match_response(http_res, "422", "application/json"): response_data = unmarshal_json_response( - models.HTTPValidationErrorData, http_res + errors.HTTPValidationErrorData, http_res ) - raise models.HTTPValidationError(response_data, http_res) + raise errors.HTTPValidationError(response_data, http_res) if utils.match_response(http_res, "4XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = await 
utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise models.SDKError("Unexpected response received", http_res) + raise errors.SDKError("Unexpected response received", http_res) diff --git a/src/mistralai/client/errors/__init__.py b/src/mistralai/client/errors/__init__.py new file mode 100644 index 00000000..58a591a1 --- /dev/null +++ b/src/mistralai/client/errors/__init__.py @@ -0,0 +1,40 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" +# @generated-id: 0b2db51246df + +from .mistralerror import MistralError +from typing import Any, TYPE_CHECKING + +from mistralai.client.utils.dynamic_imports import lazy_getattr, lazy_dir + +if TYPE_CHECKING: + from .httpvalidationerror import HTTPValidationError, HTTPValidationErrorData + from .no_response_error import NoResponseError + from .responsevalidationerror import ResponseValidationError + from .sdkerror import SDKError + +__all__ = [ + "HTTPValidationError", + "HTTPValidationErrorData", + "MistralError", + "NoResponseError", + "ResponseValidationError", + "SDKError", +] + +_dynamic_imports: dict[str, str] = { + "HTTPValidationError": ".httpvalidationerror", + "HTTPValidationErrorData": ".httpvalidationerror", + "NoResponseError": ".no_response_error", + "ResponseValidationError": ".responsevalidationerror", + "SDKError": ".sdkerror", +} + + +def __getattr__(attr_name: str) -> Any: + return lazy_getattr( + attr_name, package=__package__, dynamic_imports=_dynamic_imports + ) + + +def __dir__(): + return lazy_dir(dynamic_imports=_dynamic_imports) diff --git a/src/mistralai/client/models/httpvalidationerror.py b/src/mistralai/client/errors/httpvalidationerror.py similarity index 75% rename from src/mistralai/client/models/httpvalidationerror.py rename to src/mistralai/client/errors/httpvalidationerror.py index e7f0a35b..97b16562 100644 --- 
a/src/mistralai/client/models/httpvalidationerror.py +++ b/src/mistralai/client/errors/httpvalidationerror.py @@ -1,17 +1,17 @@ """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" -# @generated-id: 4099f568a6f8 +# @generated-id: ac3de4a52bb6 from __future__ import annotations -from .validationerror import ValidationError from dataclasses import dataclass, field import httpx -from mistralai.client.models import MistralError +from mistralai.client.errors import MistralError +from mistralai.client.models import validationerror as models_validationerror from mistralai.client.types import BaseModel from typing import List, Optional class HTTPValidationErrorData(BaseModel): - detail: Optional[List[ValidationError]] = None + detail: Optional[List[models_validationerror.ValidationError]] = None @dataclass(unsafe_hash=True) diff --git a/src/mistralai/client/models/mistralerror.py b/src/mistralai/client/errors/mistralerror.py similarity index 96% rename from src/mistralai/client/models/mistralerror.py rename to src/mistralai/client/errors/mistralerror.py index 862a6be8..eb73040c 100644 --- a/src/mistralai/client/models/mistralerror.py +++ b/src/mistralai/client/errors/mistralerror.py @@ -1,5 +1,5 @@ """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" -# @generated-id: 68ffd8394c2e +# @generated-id: d1f57f0ff1e9 import httpx from typing import Optional diff --git a/src/mistralai/client/models/no_response_error.py b/src/mistralai/client/errors/no_response_error.py similarity index 93% rename from src/mistralai/client/models/no_response_error.py rename to src/mistralai/client/errors/no_response_error.py index 7705f194..d71dfa7b 100644 --- a/src/mistralai/client/models/no_response_error.py +++ b/src/mistralai/client/errors/no_response_error.py @@ -1,5 +1,5 @@ """Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" -# @generated-id: 2849e0a482e2 +# @generated-id: 8b469ecb0906 from dataclasses import dataclass diff --git a/src/mistralai/client/models/responsevalidationerror.py b/src/mistralai/client/errors/responsevalidationerror.py similarity index 90% rename from src/mistralai/client/models/responsevalidationerror.py rename to src/mistralai/client/errors/responsevalidationerror.py index 1ed0d552..a7b3b9f0 100644 --- a/src/mistralai/client/models/responsevalidationerror.py +++ b/src/mistralai/client/errors/responsevalidationerror.py @@ -1,11 +1,11 @@ """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" -# @generated-id: c244a88981e0 +# @generated-id: 6cfaa3147abe import httpx from typing import Optional from dataclasses import dataclass -from mistralai.client.models import MistralError +from mistralai.client.errors import MistralError @dataclass(unsafe_hash=True) diff --git a/src/mistralai/client/models/sdkerror.py b/src/mistralai/client/errors/sdkerror.py similarity index 94% rename from src/mistralai/client/models/sdkerror.py rename to src/mistralai/client/errors/sdkerror.py index 101e1e6a..25b87255 100644 --- a/src/mistralai/client/models/sdkerror.py +++ b/src/mistralai/client/errors/sdkerror.py @@ -1,11 +1,11 @@ """Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" -# @generated-id: 12f991dad510 +# @generated-id: c489ffe1e9ca import httpx from typing import Optional from dataclasses import dataclass -from mistralai.client.models import MistralError +from mistralai.client.errors import MistralError MAX_MESSAGE_LEN = 10_000 diff --git a/src/mistralai/client/files.py b/src/mistralai/client/files.py index 57d389f1..a5f3adf6 100644 --- a/src/mistralai/client/files.py +++ b/src/mistralai/client/files.py @@ -3,14 +3,8 @@ from .basesdk import BaseSDK import httpx -from mistralai.client import models, utils +from mistralai.client import errors, models, utils from mistralai.client._hooks import HookContext -from mistralai.client.models import ( - file as models_file, - filepurpose as models_filepurpose, - sampletype as models_sampletype, - source as models_source, -) from mistralai.client.types import OptionalNullable, UNSET from mistralai.client.utils import get_security_from_env from mistralai.client.utils.unmarshal_json_response import unmarshal_json_response @@ -23,13 +17,13 @@ class Files(BaseSDK): def upload( self, *, - file: Union[models_file.File, models_file.FileTypedDict], - purpose: Optional[models_filepurpose.FilePurpose] = None, + file: Union[models.File, models.FileTypedDict], + purpose: Optional[models.FilePurpose] = None, retries: OptionalNullable[utils.RetryConfig] = UNSET, server_url: Optional[str] = None, timeout_ms: Optional[int] = None, http_headers: Optional[Mapping[str, str]] = None, - ) -> models.UploadFileOut: + ) -> models.CreateFileResponse: r"""Upload File Upload a file that can be used across various endpoints. 
@@ -100,7 +94,7 @@ def upload( hook_ctx=HookContext( config=self.sdk_configuration, base_url=base_url or "", - operation_id="UploadFile", + operation_id="files_api_routes_upload_file", oauth2_scopes=None, security_source=get_security_from_env( self.sdk_configuration.security, models.Security @@ -112,26 +106,26 @@ def upload( ) if utils.match_response(http_res, "200", "application/json"): - return unmarshal_json_response(models.UploadFileOut, http_res) + return unmarshal_json_response(models.CreateFileResponse, http_res) if utils.match_response(http_res, "4XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise models.SDKError("Unexpected response received", http_res) + raise errors.SDKError("Unexpected response received", http_res) async def upload_async( self, *, - file: Union[models_file.File, models_file.FileTypedDict], - purpose: Optional[models_filepurpose.FilePurpose] = None, + file: Union[models.File, models.FileTypedDict], + purpose: Optional[models.FilePurpose] = None, retries: OptionalNullable[utils.RetryConfig] = UNSET, server_url: Optional[str] = None, timeout_ms: Optional[int] = None, http_headers: Optional[Mapping[str, str]] = None, - ) -> models.UploadFileOut: + ) -> models.CreateFileResponse: r"""Upload File Upload a file that can be used across various endpoints. 
@@ -202,7 +196,7 @@ async def upload_async( hook_ctx=HookContext( config=self.sdk_configuration, base_url=base_url or "", - operation_id="UploadFile", + operation_id="files_api_routes_upload_file", oauth2_scopes=None, security_source=get_security_from_env( self.sdk_configuration.security, models.Security @@ -214,15 +208,15 @@ async def upload_async( ) if utils.match_response(http_res, "200", "application/json"): - return unmarshal_json_response(models.UploadFileOut, http_res) + return unmarshal_json_response(models.CreateFileResponse, http_res) if utils.match_response(http_res, "4XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise models.SDKError("Unexpected response received", http_res) + raise errors.SDKError("Unexpected response received", http_res) def list( self, @@ -230,16 +224,16 @@ def list( page: Optional[int] = 0, page_size: Optional[int] = 100, include_total: Optional[bool] = True, - sample_type: OptionalNullable[List[models_sampletype.SampleType]] = UNSET, - source: OptionalNullable[List[models_source.Source]] = UNSET, + sample_type: OptionalNullable[List[models.SampleType]] = UNSET, + source: OptionalNullable[List[models.Source]] = UNSET, search: OptionalNullable[str] = UNSET, - purpose: OptionalNullable[models_filepurpose.FilePurpose] = UNSET, + purpose: OptionalNullable[models.FilePurpose] = UNSET, mimetypes: OptionalNullable[List[str]] = UNSET, retries: OptionalNullable[utils.RetryConfig] = UNSET, server_url: Optional[str] = None, timeout_ms: Optional[int] = None, http_headers: Optional[Mapping[str, str]] = None, - ) -> models.ListFilesOut: 
+ ) -> models.ListFilesResponse: r"""List Files Returns a list of files that belong to the user's organization. @@ -267,7 +261,7 @@ def list( else: base_url = self._get_url(base_url, url_variables) - request = models.ListFilesRequest( + request = models.FilesAPIRoutesListFilesRequest( page=page, page_size=page_size, include_total=include_total, @@ -307,7 +301,7 @@ def list( hook_ctx=HookContext( config=self.sdk_configuration, base_url=base_url or "", - operation_id="ListFiles", + operation_id="files_api_routes_list_files", oauth2_scopes=None, security_source=get_security_from_env( self.sdk_configuration.security, models.Security @@ -319,15 +313,15 @@ def list( ) if utils.match_response(http_res, "200", "application/json"): - return unmarshal_json_response(models.ListFilesOut, http_res) + return unmarshal_json_response(models.ListFilesResponse, http_res) if utils.match_response(http_res, "4XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise models.SDKError("Unexpected response received", http_res) + raise errors.SDKError("Unexpected response received", http_res) async def list_async( self, @@ -335,16 +329,16 @@ async def list_async( page: Optional[int] = 0, page_size: Optional[int] = 100, include_total: Optional[bool] = True, - sample_type: OptionalNullable[List[models_sampletype.SampleType]] = UNSET, - source: OptionalNullable[List[models_source.Source]] = UNSET, + sample_type: OptionalNullable[List[models.SampleType]] = UNSET, + source: OptionalNullable[List[models.Source]] = UNSET, search: OptionalNullable[str] = UNSET, - purpose: OptionalNullable[models_filepurpose.FilePurpose] 
= UNSET, + purpose: OptionalNullable[models.FilePurpose] = UNSET, mimetypes: OptionalNullable[List[str]] = UNSET, retries: OptionalNullable[utils.RetryConfig] = UNSET, server_url: Optional[str] = None, timeout_ms: Optional[int] = None, http_headers: Optional[Mapping[str, str]] = None, - ) -> models.ListFilesOut: + ) -> models.ListFilesResponse: r"""List Files Returns a list of files that belong to the user's organization. @@ -372,7 +366,7 @@ async def list_async( else: base_url = self._get_url(base_url, url_variables) - request = models.ListFilesRequest( + request = models.FilesAPIRoutesListFilesRequest( page=page, page_size=page_size, include_total=include_total, @@ -412,7 +406,7 @@ async def list_async( hook_ctx=HookContext( config=self.sdk_configuration, base_url=base_url or "", - operation_id="ListFiles", + operation_id="files_api_routes_list_files", oauth2_scopes=None, security_source=get_security_from_env( self.sdk_configuration.security, models.Security @@ -424,15 +418,15 @@ async def list_async( ) if utils.match_response(http_res, "200", "application/json"): - return unmarshal_json_response(models.ListFilesOut, http_res) + return unmarshal_json_response(models.ListFilesResponse, http_res) if utils.match_response(http_res, "4XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise models.SDKError("Unexpected response received", http_res) + raise errors.SDKError("Unexpected response received", http_res) def retrieve( self, @@ -442,7 +436,7 @@ def retrieve( server_url: Optional[str] = None, timeout_ms: Optional[int] = None, http_headers: 
Optional[Mapping[str, str]] = None, - ) -> models.RetrieveFileOut: + ) -> models.GetFileResponse: r"""Retrieve File Returns information about a specific file. @@ -463,7 +457,7 @@ def retrieve( else: base_url = self._get_url(base_url, url_variables) - request = models.RetrieveFileRequest( + request = models.FilesAPIRoutesRetrieveFileRequest( file_id=file_id, ) @@ -496,7 +490,7 @@ def retrieve( hook_ctx=HookContext( config=self.sdk_configuration, base_url=base_url or "", - operation_id="RetrieveFile", + operation_id="files_api_routes_retrieve_file", oauth2_scopes=None, security_source=get_security_from_env( self.sdk_configuration.security, models.Security @@ -508,15 +502,15 @@ def retrieve( ) if utils.match_response(http_res, "200", "application/json"): - return unmarshal_json_response(models.RetrieveFileOut, http_res) + return unmarshal_json_response(models.GetFileResponse, http_res) if utils.match_response(http_res, "4XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise models.SDKError("Unexpected response received", http_res) + raise errors.SDKError("Unexpected response received", http_res) async def retrieve_async( self, @@ -526,7 +520,7 @@ async def retrieve_async( server_url: Optional[str] = None, timeout_ms: Optional[int] = None, http_headers: Optional[Mapping[str, str]] = None, - ) -> models.RetrieveFileOut: + ) -> models.GetFileResponse: r"""Retrieve File Returns information about a specific file. 
@@ -547,7 +541,7 @@ async def retrieve_async( else: base_url = self._get_url(base_url, url_variables) - request = models.RetrieveFileRequest( + request = models.FilesAPIRoutesRetrieveFileRequest( file_id=file_id, ) @@ -580,7 +574,7 @@ async def retrieve_async( hook_ctx=HookContext( config=self.sdk_configuration, base_url=base_url or "", - operation_id="RetrieveFile", + operation_id="files_api_routes_retrieve_file", oauth2_scopes=None, security_source=get_security_from_env( self.sdk_configuration.security, models.Security @@ -592,15 +586,15 @@ async def retrieve_async( ) if utils.match_response(http_res, "200", "application/json"): - return unmarshal_json_response(models.RetrieveFileOut, http_res) + return unmarshal_json_response(models.GetFileResponse, http_res) if utils.match_response(http_res, "4XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise models.SDKError("Unexpected response received", http_res) + raise errors.SDKError("Unexpected response received", http_res) def delete( self, @@ -610,7 +604,7 @@ def delete( server_url: Optional[str] = None, timeout_ms: Optional[int] = None, http_headers: Optional[Mapping[str, str]] = None, - ) -> models.DeleteFileOut: + ) -> models.DeleteFileResponse: r"""Delete File Delete a file. 
@@ -631,7 +625,7 @@ def delete( else: base_url = self._get_url(base_url, url_variables) - request = models.DeleteFileRequest( + request = models.FilesAPIRoutesDeleteFileRequest( file_id=file_id, ) @@ -664,7 +658,7 @@ def delete( hook_ctx=HookContext( config=self.sdk_configuration, base_url=base_url or "", - operation_id="DeleteFile", + operation_id="files_api_routes_delete_file", oauth2_scopes=None, security_source=get_security_from_env( self.sdk_configuration.security, models.Security @@ -676,15 +670,15 @@ def delete( ) if utils.match_response(http_res, "200", "application/json"): - return unmarshal_json_response(models.DeleteFileOut, http_res) + return unmarshal_json_response(models.DeleteFileResponse, http_res) if utils.match_response(http_res, "4XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise models.SDKError("Unexpected response received", http_res) + raise errors.SDKError("Unexpected response received", http_res) async def delete_async( self, @@ -694,7 +688,7 @@ async def delete_async( server_url: Optional[str] = None, timeout_ms: Optional[int] = None, http_headers: Optional[Mapping[str, str]] = None, - ) -> models.DeleteFileOut: + ) -> models.DeleteFileResponse: r"""Delete File Delete a file. 
@@ -715,7 +709,7 @@ async def delete_async( else: base_url = self._get_url(base_url, url_variables) - request = models.DeleteFileRequest( + request = models.FilesAPIRoutesDeleteFileRequest( file_id=file_id, ) @@ -748,7 +742,7 @@ async def delete_async( hook_ctx=HookContext( config=self.sdk_configuration, base_url=base_url or "", - operation_id="DeleteFile", + operation_id="files_api_routes_delete_file", oauth2_scopes=None, security_source=get_security_from_env( self.sdk_configuration.security, models.Security @@ -760,15 +754,15 @@ async def delete_async( ) if utils.match_response(http_res, "200", "application/json"): - return unmarshal_json_response(models.DeleteFileOut, http_res) + return unmarshal_json_response(models.DeleteFileResponse, http_res) if utils.match_response(http_res, "4XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise models.SDKError("Unexpected response received", http_res) + raise errors.SDKError("Unexpected response received", http_res) def download( self, @@ -799,7 +793,7 @@ def download( else: base_url = self._get_url(base_url, url_variables) - request = models.DownloadFileRequest( + request = models.FilesAPIRoutesDownloadFileRequest( file_id=file_id, ) @@ -832,7 +826,7 @@ def download( hook_ctx=HookContext( config=self.sdk_configuration, base_url=base_url or "", - operation_id="DownloadFile", + operation_id="files_api_routes_download_file", oauth2_scopes=None, security_source=get_security_from_env( self.sdk_configuration.security, models.Security @@ -848,13 +842,13 @@ def download( return http_res if utils.match_response(http_res, 
"4XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("Unexpected response received", http_res, http_res_text) + raise errors.SDKError("Unexpected response received", http_res, http_res_text) async def download_async( self, @@ -885,7 +879,7 @@ async def download_async( else: base_url = self._get_url(base_url, url_variables) - request = models.DownloadFileRequest( + request = models.FilesAPIRoutesDownloadFileRequest( file_id=file_id, ) @@ -918,7 +912,7 @@ async def download_async( hook_ctx=HookContext( config=self.sdk_configuration, base_url=base_url or "", - operation_id="DownloadFile", + operation_id="files_api_routes_download_file", oauth2_scopes=None, security_source=get_security_from_env( self.sdk_configuration.security, models.Security @@ -934,13 +928,13 @@ async def download_async( return http_res if utils.match_response(http_res, "4XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("Unexpected response received", http_res, http_res_text) + raise errors.SDKError("Unexpected response received", http_res, http_res_text) def 
get_signed_url( self, @@ -951,7 +945,7 @@ def get_signed_url( server_url: Optional[str] = None, timeout_ms: Optional[int] = None, http_headers: Optional[Mapping[str, str]] = None, - ) -> models.FileSignedURL: + ) -> models.GetSignedURLResponse: r"""Get Signed Url :param file_id: @@ -971,7 +965,7 @@ def get_signed_url( else: base_url = self._get_url(base_url, url_variables) - request = models.GetFileSignedURLRequest( + request = models.FilesAPIRoutesGetSignedURLRequest( file_id=file_id, expiry=expiry, ) @@ -1005,7 +999,7 @@ def get_signed_url( hook_ctx=HookContext( config=self.sdk_configuration, base_url=base_url or "", - operation_id="GetFileSignedUrl", + operation_id="files_api_routes_get_signed_url", oauth2_scopes=None, security_source=get_security_from_env( self.sdk_configuration.security, models.Security @@ -1017,15 +1011,15 @@ def get_signed_url( ) if utils.match_response(http_res, "200", "application/json"): - return unmarshal_json_response(models.FileSignedURL, http_res) + return unmarshal_json_response(models.GetSignedURLResponse, http_res) if utils.match_response(http_res, "4XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise models.SDKError("Unexpected response received", http_res) + raise errors.SDKError("Unexpected response received", http_res) async def get_signed_url_async( self, @@ -1036,7 +1030,7 @@ async def get_signed_url_async( server_url: Optional[str] = None, timeout_ms: Optional[int] = None, http_headers: Optional[Mapping[str, str]] = None, - ) -> models.FileSignedURL: + ) -> models.GetSignedURLResponse: r"""Get Signed Url :param file_id: @@ -1056,7 +1050,7 
@@ async def get_signed_url_async( else: base_url = self._get_url(base_url, url_variables) - request = models.GetFileSignedURLRequest( + request = models.FilesAPIRoutesGetSignedURLRequest( file_id=file_id, expiry=expiry, ) @@ -1090,7 +1084,7 @@ async def get_signed_url_async( hook_ctx=HookContext( config=self.sdk_configuration, base_url=base_url or "", - operation_id="GetFileSignedUrl", + operation_id="files_api_routes_get_signed_url", oauth2_scopes=None, security_source=get_security_from_env( self.sdk_configuration.security, models.Security @@ -1102,12 +1096,12 @@ async def get_signed_url_async( ) if utils.match_response(http_res, "200", "application/json"): - return unmarshal_json_response(models.FileSignedURL, http_res) + return unmarshal_json_response(models.GetSignedURLResponse, http_res) if utils.match_response(http_res, "4XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise models.SDKError("Unexpected response received", http_res) + raise errors.SDKError("Unexpected response received", http_res) diff --git a/src/mistralai/client/fim.py b/src/mistralai/client/fim.py index be3f7742..8ffb7730 100644 --- a/src/mistralai/client/fim.py +++ b/src/mistralai/client/fim.py @@ -2,12 +2,8 @@ # @generated-id: 217bea5d701d from .basesdk import BaseSDK -from mistralai.client import models, utils +from mistralai.client import errors, models, utils from mistralai.client._hooks import HookContext -from mistralai.client.models import ( - fimcompletionrequest as models_fimcompletionrequest, - fimcompletionstreamrequest as models_fimcompletionstreamrequest, -) from 
mistralai.client.types import OptionalNullable, UNSET from mistralai.client.utils import eventstreaming, get_security_from_env from mistralai.client.utils.unmarshal_json_response import unmarshal_json_response @@ -28,8 +24,8 @@ def complete( stream: Optional[bool] = False, stop: Optional[ Union[ - models_fimcompletionrequest.FIMCompletionRequestStop, - models_fimcompletionrequest.FIMCompletionRequestStopTypedDict, + models.FIMCompletionRequestStop, + models.FIMCompletionRequestStopTypedDict, ] ] = None, random_seed: OptionalNullable[int] = UNSET, @@ -133,17 +129,17 @@ def complete( return unmarshal_json_response(models.FIMCompletionResponse, http_res) if utils.match_response(http_res, "422", "application/json"): response_data = unmarshal_json_response( - models.HTTPValidationErrorData, http_res + errors.HTTPValidationErrorData, http_res ) - raise models.HTTPValidationError(response_data, http_res) + raise errors.HTTPValidationError(response_data, http_res) if utils.match_response(http_res, "4XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise models.SDKError("Unexpected response received", http_res) + raise errors.SDKError("Unexpected response received", http_res) async def complete_async( self, @@ -156,8 +152,8 @@ async def complete_async( stream: Optional[bool] = False, stop: Optional[ Union[ - models_fimcompletionrequest.FIMCompletionRequestStop, - models_fimcompletionrequest.FIMCompletionRequestStopTypedDict, + models.FIMCompletionRequestStop, + models.FIMCompletionRequestStopTypedDict, ] ] = None, random_seed: OptionalNullable[int] = UNSET, @@ -261,17 +257,17 @@ async def 
complete_async( return unmarshal_json_response(models.FIMCompletionResponse, http_res) if utils.match_response(http_res, "422", "application/json"): response_data = unmarshal_json_response( - models.HTTPValidationErrorData, http_res + errors.HTTPValidationErrorData, http_res ) - raise models.HTTPValidationError(response_data, http_res) + raise errors.HTTPValidationError(response_data, http_res) if utils.match_response(http_res, "4XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise models.SDKError("Unexpected response received", http_res) + raise errors.SDKError("Unexpected response received", http_res) def stream( self, @@ -284,8 +280,8 @@ def stream( stream: Optional[bool] = True, stop: Optional[ Union[ - models_fimcompletionstreamrequest.FIMCompletionStreamRequestStop, - models_fimcompletionstreamrequest.FIMCompletionStreamRequestStopTypedDict, + models.FIMCompletionStreamRequestStop, + models.FIMCompletionStreamRequestStopTypedDict, ] ] = None, random_seed: OptionalNullable[int] = UNSET, @@ -396,18 +392,18 @@ def stream( if utils.match_response(http_res, "422", "application/json"): http_res_text = utils.stream_to_text(http_res) response_data = unmarshal_json_response( - models.HTTPValidationErrorData, http_res, http_res_text + errors.HTTPValidationErrorData, http_res, http_res_text ) - raise models.HTTPValidationError(response_data, http_res, http_res_text) + raise errors.HTTPValidationError(response_data, http_res, http_res_text) if utils.match_response(http_res, "4XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API 
error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("Unexpected response received", http_res, http_res_text) + raise errors.SDKError("Unexpected response received", http_res, http_res_text) async def stream_async( self, @@ -420,8 +416,8 @@ async def stream_async( stream: Optional[bool] = True, stop: Optional[ Union[ - models_fimcompletionstreamrequest.FIMCompletionStreamRequestStop, - models_fimcompletionstreamrequest.FIMCompletionStreamRequestStopTypedDict, + models.FIMCompletionStreamRequestStop, + models.FIMCompletionStreamRequestStopTypedDict, ] ] = None, random_seed: OptionalNullable[int] = UNSET, @@ -532,15 +528,15 @@ async def stream_async( if utils.match_response(http_res, "422", "application/json"): http_res_text = await utils.stream_to_text_async(http_res) response_data = unmarshal_json_response( - models.HTTPValidationErrorData, http_res, http_res_text + errors.HTTPValidationErrorData, http_res, http_res_text ) - raise models.HTTPValidationError(response_data, http_res, http_res_text) + raise errors.HTTPValidationError(response_data, http_res, http_res_text) if utils.match_response(http_res, "4XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) http_res_text = await 
utils.stream_to_text_async(http_res) - raise models.SDKError("Unexpected response received", http_res, http_res_text) + raise errors.SDKError("Unexpected response received", http_res, http_res_text) diff --git a/src/mistralai/client/fine_tuning_jobs.py b/src/mistralai/client/fine_tuning_jobs.py index 9a28ded1..c2ee871b 100644 --- a/src/mistralai/client/fine_tuning_jobs.py +++ b/src/mistralai/client/fine_tuning_jobs.py @@ -3,15 +3,8 @@ from .basesdk import BaseSDK from datetime import datetime -from mistralai.client import models, utils +from mistralai.client import errors, models, utils from mistralai.client._hooks import HookContext -from mistralai.client.models import ( - classifiertargetin as models_classifiertargetin, - finetuneablemodeltype as models_finetuneablemodeltype, - jobin as models_jobin, - listfinetuningjobsop as models_listfinetuningjobsop, - trainingfile as models_trainingfile, -) from mistralai.client.types import OptionalNullable, UNSET from mistralai.client.utils import get_security_from_env from mistralai.client.utils.unmarshal_json_response import unmarshal_json_response @@ -29,7 +22,7 @@ def list( created_before: OptionalNullable[datetime] = UNSET, created_by_me: Optional[bool] = False, status: OptionalNullable[ - models_listfinetuningjobsop.ListFineTuningJobsStatus + models.JobsAPIRoutesFineTuningGetFineTuningJobsStatus ] = UNSET, wandb_project: OptionalNullable[str] = UNSET, wandb_name: OptionalNullable[str] = UNSET, @@ -38,7 +31,7 @@ def list( server_url: Optional[str] = None, timeout_ms: Optional[int] = None, http_headers: Optional[Mapping[str, str]] = None, - ) -> models.JobsOut: + ) -> models.ListFineTuningJobsResponse: r"""Get Fine Tuning Jobs Get a list of fine-tuning jobs for your organization and user. 
@@ -68,7 +61,7 @@ def list( else: base_url = self._get_url(base_url, url_variables) - request = models.ListFineTuningJobsRequest( + request = models.JobsAPIRoutesFineTuningGetFineTuningJobsRequest( page=page, page_size=page_size, model=model, @@ -110,7 +103,7 @@ def list( hook_ctx=HookContext( config=self.sdk_configuration, base_url=base_url or "", - operation_id="ListFineTuningJobs", + operation_id="jobs_api_routes_fine_tuning_get_fine_tuning_jobs", oauth2_scopes=None, security_source=get_security_from_env( self.sdk_configuration.security, models.Security @@ -122,15 +115,15 @@ def list( ) if utils.match_response(http_res, "200", "application/json"): - return unmarshal_json_response(models.JobsOut, http_res) + return unmarshal_json_response(models.ListFineTuningJobsResponse, http_res) if utils.match_response(http_res, "4XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise models.SDKError("Unexpected response received", http_res) + raise errors.SDKError("Unexpected response received", http_res) async def list_async( self, @@ -142,7 +135,7 @@ async def list_async( created_before: OptionalNullable[datetime] = UNSET, created_by_me: Optional[bool] = False, status: OptionalNullable[ - models_listfinetuningjobsop.ListFineTuningJobsStatus + models.JobsAPIRoutesFineTuningGetFineTuningJobsStatus ] = UNSET, wandb_project: OptionalNullable[str] = UNSET, wandb_name: OptionalNullable[str] = UNSET, @@ -151,7 +144,7 @@ async def list_async( server_url: Optional[str] = None, timeout_ms: Optional[int] = None, http_headers: Optional[Mapping[str, str]] = None, - ) -> models.JobsOut: + ) -> 
models.ListFineTuningJobsResponse: r"""Get Fine Tuning Jobs Get a list of fine-tuning jobs for your organization and user. @@ -181,7 +174,7 @@ async def list_async( else: base_url = self._get_url(base_url, url_variables) - request = models.ListFineTuningJobsRequest( + request = models.JobsAPIRoutesFineTuningGetFineTuningJobsRequest( page=page, page_size=page_size, model=model, @@ -223,7 +216,7 @@ async def list_async( hook_ctx=HookContext( config=self.sdk_configuration, base_url=base_url or "", - operation_id="ListFineTuningJobs", + operation_id="jobs_api_routes_fine_tuning_get_fine_tuning_jobs", oauth2_scopes=None, security_source=get_security_from_env( self.sdk_configuration.security, models.Security @@ -235,59 +228,49 @@ async def list_async( ) if utils.match_response(http_res, "200", "application/json"): - return unmarshal_json_response(models.JobsOut, http_res) + return unmarshal_json_response(models.ListFineTuningJobsResponse, http_res) if utils.match_response(http_res, "4XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise models.SDKError("Unexpected response received", http_res) + raise errors.SDKError("Unexpected response received", http_res) def create( self, *, model: str, - hyperparameters: Union[ - models_jobin.Hyperparameters, models_jobin.HyperparametersTypedDict - ], + hyperparameters: Union[models.Hyperparameters, models.HyperparametersTypedDict], training_files: Optional[ - Union[ - List[models_trainingfile.TrainingFile], - List[models_trainingfile.TrainingFileTypedDict], - ] + Union[List[models.TrainingFile], 
List[models.TrainingFileTypedDict]] ] = None, validation_files: OptionalNullable[List[str]] = UNSET, suffix: OptionalNullable[str] = UNSET, integrations: OptionalNullable[ Union[ - List[models_jobin.JobInIntegration], - List[models_jobin.JobInIntegrationTypedDict], + List[models.CreateFineTuningJobRequestIntegration], + List[models.CreateFineTuningJobRequestIntegrationTypedDict], ] ] = UNSET, auto_start: Optional[bool] = None, invalid_sample_skip_percentage: Optional[float] = 0, - job_type: OptionalNullable[ - models_finetuneablemodeltype.FineTuneableModelType - ] = UNSET, + job_type: OptionalNullable[models.FineTuneableModelType] = UNSET, repositories: OptionalNullable[ Union[ - List[models_jobin.JobInRepository], - List[models_jobin.JobInRepositoryTypedDict], + List[models.CreateFineTuningJobRequestRepository], + List[models.CreateFineTuningJobRequestRepositoryTypedDict], ] ] = UNSET, classifier_targets: OptionalNullable[ - Union[ - List[models_classifiertargetin.ClassifierTargetIn], - List[models_classifiertargetin.ClassifierTargetInTypedDict], - ] + Union[List[models.ClassifierTarget], List[models.ClassifierTargetTypedDict]] ] = UNSET, retries: OptionalNullable[utils.RetryConfig] = UNSET, server_url: Optional[str] = None, timeout_ms: Optional[int] = None, http_headers: Optional[Mapping[str, str]] = None, - ) -> models.CreateFineTuningJobResponse: + ) -> models.JobsAPIRoutesFineTuningCreateFineTuningJobResponse: r"""Create Fine Tuning Job Create a new fine-tuning job, it will be queued for processing. 
@@ -318,7 +301,7 @@ def create( else: base_url = self._get_url(base_url, url_variables) - request = models.JobIn( + request = models.CreateFineTuningJobRequest( model=model, training_files=utils.get_pydantic_model( training_files, Optional[List[models.TrainingFile]] @@ -326,7 +309,8 @@ def create( validation_files=validation_files, suffix=suffix, integrations=utils.get_pydantic_model( - integrations, OptionalNullable[List[models.JobInIntegration]] + integrations, + OptionalNullable[List[models.CreateFineTuningJobRequestIntegration]], ), auto_start=auto_start, invalid_sample_skip_percentage=invalid_sample_skip_percentage, @@ -335,10 +319,11 @@ def create( hyperparameters, models.Hyperparameters ), repositories=utils.get_pydantic_model( - repositories, OptionalNullable[List[models.JobInRepository]] + repositories, + OptionalNullable[List[models.CreateFineTuningJobRequestRepository]], ), classifier_targets=utils.get_pydantic_model( - classifier_targets, OptionalNullable[List[models.ClassifierTargetIn]] + classifier_targets, OptionalNullable[List[models.ClassifierTarget]] ), ) @@ -356,7 +341,7 @@ def create( http_headers=http_headers, security=self.sdk_configuration.security, get_serialized_body=lambda: utils.serialize_request_body( - request, False, False, "json", models.JobIn + request, False, False, "json", models.CreateFineTuningJobRequest ), allow_empty_value=None, timeout_ms=timeout_ms, @@ -374,7 +359,7 @@ def create( hook_ctx=HookContext( config=self.sdk_configuration, base_url=base_url or "", - operation_id="CreateFineTuningJob", + operation_id="jobs_api_routes_fine_tuning_create_fine_tuning_job", oauth2_scopes=None, security_source=get_security_from_env( self.sdk_configuration.security, models.Security @@ -386,59 +371,51 @@ def create( ) if utils.match_response(http_res, "200", "application/json"): - return unmarshal_json_response(models.CreateFineTuningJobResponse, http_res) + return unmarshal_json_response( + 
models.JobsAPIRoutesFineTuningCreateFineTuningJobResponse, http_res + ) if utils.match_response(http_res, "4XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise models.SDKError("Unexpected response received", http_res) + raise errors.SDKError("Unexpected response received", http_res) async def create_async( self, *, model: str, - hyperparameters: Union[ - models_jobin.Hyperparameters, models_jobin.HyperparametersTypedDict - ], + hyperparameters: Union[models.Hyperparameters, models.HyperparametersTypedDict], training_files: Optional[ - Union[ - List[models_trainingfile.TrainingFile], - List[models_trainingfile.TrainingFileTypedDict], - ] + Union[List[models.TrainingFile], List[models.TrainingFileTypedDict]] ] = None, validation_files: OptionalNullable[List[str]] = UNSET, suffix: OptionalNullable[str] = UNSET, integrations: OptionalNullable[ Union[ - List[models_jobin.JobInIntegration], - List[models_jobin.JobInIntegrationTypedDict], + List[models.CreateFineTuningJobRequestIntegration], + List[models.CreateFineTuningJobRequestIntegrationTypedDict], ] ] = UNSET, auto_start: Optional[bool] = None, invalid_sample_skip_percentage: Optional[float] = 0, - job_type: OptionalNullable[ - models_finetuneablemodeltype.FineTuneableModelType - ] = UNSET, + job_type: OptionalNullable[models.FineTuneableModelType] = UNSET, repositories: OptionalNullable[ Union[ - List[models_jobin.JobInRepository], - List[models_jobin.JobInRepositoryTypedDict], + List[models.CreateFineTuningJobRequestRepository], + List[models.CreateFineTuningJobRequestRepositoryTypedDict], ] ] = UNSET, classifier_targets: 
OptionalNullable[ - Union[ - List[models_classifiertargetin.ClassifierTargetIn], - List[models_classifiertargetin.ClassifierTargetInTypedDict], - ] + Union[List[models.ClassifierTarget], List[models.ClassifierTargetTypedDict]] ] = UNSET, retries: OptionalNullable[utils.RetryConfig] = UNSET, server_url: Optional[str] = None, timeout_ms: Optional[int] = None, http_headers: Optional[Mapping[str, str]] = None, - ) -> models.CreateFineTuningJobResponse: + ) -> models.JobsAPIRoutesFineTuningCreateFineTuningJobResponse: r"""Create Fine Tuning Job Create a new fine-tuning job, it will be queued for processing. @@ -469,7 +446,7 @@ async def create_async( else: base_url = self._get_url(base_url, url_variables) - request = models.JobIn( + request = models.CreateFineTuningJobRequest( model=model, training_files=utils.get_pydantic_model( training_files, Optional[List[models.TrainingFile]] @@ -477,7 +454,8 @@ async def create_async( validation_files=validation_files, suffix=suffix, integrations=utils.get_pydantic_model( - integrations, OptionalNullable[List[models.JobInIntegration]] + integrations, + OptionalNullable[List[models.CreateFineTuningJobRequestIntegration]], ), auto_start=auto_start, invalid_sample_skip_percentage=invalid_sample_skip_percentage, @@ -486,10 +464,11 @@ async def create_async( hyperparameters, models.Hyperparameters ), repositories=utils.get_pydantic_model( - repositories, OptionalNullable[List[models.JobInRepository]] + repositories, + OptionalNullable[List[models.CreateFineTuningJobRequestRepository]], ), classifier_targets=utils.get_pydantic_model( - classifier_targets, OptionalNullable[List[models.ClassifierTargetIn]] + classifier_targets, OptionalNullable[List[models.ClassifierTarget]] ), ) @@ -507,7 +486,7 @@ async def create_async( http_headers=http_headers, security=self.sdk_configuration.security, get_serialized_body=lambda: utils.serialize_request_body( - request, False, False, "json", models.JobIn + request, False, False, "json", 
models.CreateFineTuningJobRequest ), allow_empty_value=None, timeout_ms=timeout_ms, @@ -525,7 +504,7 @@ async def create_async( hook_ctx=HookContext( config=self.sdk_configuration, base_url=base_url or "", - operation_id="CreateFineTuningJob", + operation_id="jobs_api_routes_fine_tuning_create_fine_tuning_job", oauth2_scopes=None, security_source=get_security_from_env( self.sdk_configuration.security, models.Security @@ -537,15 +516,17 @@ async def create_async( ) if utils.match_response(http_res, "200", "application/json"): - return unmarshal_json_response(models.CreateFineTuningJobResponse, http_res) + return unmarshal_json_response( + models.JobsAPIRoutesFineTuningCreateFineTuningJobResponse, http_res + ) if utils.match_response(http_res, "4XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise models.SDKError("Unexpected response received", http_res) + raise errors.SDKError("Unexpected response received", http_res) def get( self, @@ -555,7 +536,7 @@ def get( server_url: Optional[str] = None, timeout_ms: Optional[int] = None, http_headers: Optional[Mapping[str, str]] = None, - ) -> models.GetFineTuningJobResponse: + ) -> models.JobsAPIRoutesFineTuningGetFineTuningJobResponse: r"""Get Fine Tuning Job Get a fine-tuned job details by its UUID. 
@@ -576,7 +557,7 @@ def get( else: base_url = self._get_url(base_url, url_variables) - request = models.GetFineTuningJobRequest( + request = models.JobsAPIRoutesFineTuningGetFineTuningJobRequest( job_id=job_id, ) @@ -609,7 +590,7 @@ def get( hook_ctx=HookContext( config=self.sdk_configuration, base_url=base_url or "", - operation_id="GetFineTuningJob", + operation_id="jobs_api_routes_fine_tuning_get_fine_tuning_job", oauth2_scopes=None, security_source=get_security_from_env( self.sdk_configuration.security, models.Security @@ -621,15 +602,17 @@ def get( ) if utils.match_response(http_res, "200", "application/json"): - return unmarshal_json_response(models.GetFineTuningJobResponse, http_res) + return unmarshal_json_response( + models.JobsAPIRoutesFineTuningGetFineTuningJobResponse, http_res + ) if utils.match_response(http_res, "4XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise models.SDKError("Unexpected response received", http_res) + raise errors.SDKError("Unexpected response received", http_res) async def get_async( self, @@ -639,7 +622,7 @@ async def get_async( server_url: Optional[str] = None, timeout_ms: Optional[int] = None, http_headers: Optional[Mapping[str, str]] = None, - ) -> models.GetFineTuningJobResponse: + ) -> models.JobsAPIRoutesFineTuningGetFineTuningJobResponse: r"""Get Fine Tuning Job Get a fine-tuned job details by its UUID. 
@@ -660,7 +643,7 @@ async def get_async( else: base_url = self._get_url(base_url, url_variables) - request = models.GetFineTuningJobRequest( + request = models.JobsAPIRoutesFineTuningGetFineTuningJobRequest( job_id=job_id, ) @@ -693,7 +676,7 @@ async def get_async( hook_ctx=HookContext( config=self.sdk_configuration, base_url=base_url or "", - operation_id="GetFineTuningJob", + operation_id="jobs_api_routes_fine_tuning_get_fine_tuning_job", oauth2_scopes=None, security_source=get_security_from_env( self.sdk_configuration.security, models.Security @@ -705,15 +688,17 @@ async def get_async( ) if utils.match_response(http_res, "200", "application/json"): - return unmarshal_json_response(models.GetFineTuningJobResponse, http_res) + return unmarshal_json_response( + models.JobsAPIRoutesFineTuningGetFineTuningJobResponse, http_res + ) if utils.match_response(http_res, "4XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise models.SDKError("Unexpected response received", http_res) + raise errors.SDKError("Unexpected response received", http_res) def cancel( self, @@ -723,7 +708,7 @@ def cancel( server_url: Optional[str] = None, timeout_ms: Optional[int] = None, http_headers: Optional[Mapping[str, str]] = None, - ) -> models.CancelFineTuningJobResponse: + ) -> models.JobsAPIRoutesFineTuningCancelFineTuningJobResponse: r"""Cancel Fine Tuning Job Request the cancellation of a fine tuning job. 
@@ -744,7 +729,7 @@ def cancel( else: base_url = self._get_url(base_url, url_variables) - request = models.CancelFineTuningJobRequest( + request = models.JobsAPIRoutesFineTuningCancelFineTuningJobRequest( job_id=job_id, ) @@ -777,7 +762,7 @@ def cancel( hook_ctx=HookContext( config=self.sdk_configuration, base_url=base_url or "", - operation_id="CancelFineTuningJob", + operation_id="jobs_api_routes_fine_tuning_cancel_fine_tuning_job", oauth2_scopes=None, security_source=get_security_from_env( self.sdk_configuration.security, models.Security @@ -789,15 +774,17 @@ def cancel( ) if utils.match_response(http_res, "200", "application/json"): - return unmarshal_json_response(models.CancelFineTuningJobResponse, http_res) + return unmarshal_json_response( + models.JobsAPIRoutesFineTuningCancelFineTuningJobResponse, http_res + ) if utils.match_response(http_res, "4XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise models.SDKError("Unexpected response received", http_res) + raise errors.SDKError("Unexpected response received", http_res) async def cancel_async( self, @@ -807,7 +794,7 @@ async def cancel_async( server_url: Optional[str] = None, timeout_ms: Optional[int] = None, http_headers: Optional[Mapping[str, str]] = None, - ) -> models.CancelFineTuningJobResponse: + ) -> models.JobsAPIRoutesFineTuningCancelFineTuningJobResponse: r"""Cancel Fine Tuning Job Request the cancellation of a fine tuning job. 
@@ -828,7 +815,7 @@ async def cancel_async( else: base_url = self._get_url(base_url, url_variables) - request = models.CancelFineTuningJobRequest( + request = models.JobsAPIRoutesFineTuningCancelFineTuningJobRequest( job_id=job_id, ) @@ -861,7 +848,7 @@ async def cancel_async( hook_ctx=HookContext( config=self.sdk_configuration, base_url=base_url or "", - operation_id="CancelFineTuningJob", + operation_id="jobs_api_routes_fine_tuning_cancel_fine_tuning_job", oauth2_scopes=None, security_source=get_security_from_env( self.sdk_configuration.security, models.Security @@ -873,15 +860,17 @@ async def cancel_async( ) if utils.match_response(http_res, "200", "application/json"): - return unmarshal_json_response(models.CancelFineTuningJobResponse, http_res) + return unmarshal_json_response( + models.JobsAPIRoutesFineTuningCancelFineTuningJobResponse, http_res + ) if utils.match_response(http_res, "4XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise models.SDKError("Unexpected response received", http_res) + raise errors.SDKError("Unexpected response received", http_res) def start( self, @@ -891,7 +880,7 @@ def start( server_url: Optional[str] = None, timeout_ms: Optional[int] = None, http_headers: Optional[Mapping[str, str]] = None, - ) -> models.StartFineTuningJobResponse: + ) -> models.JobsAPIRoutesFineTuningStartFineTuningJobResponse: r"""Start Fine Tuning Job Request the start of a validated fine tuning job. 
@@ -912,7 +901,7 @@ def start( else: base_url = self._get_url(base_url, url_variables) - request = models.StartFineTuningJobRequest( + request = models.JobsAPIRoutesFineTuningStartFineTuningJobRequest( job_id=job_id, ) @@ -945,7 +934,7 @@ def start( hook_ctx=HookContext( config=self.sdk_configuration, base_url=base_url or "", - operation_id="StartFineTuningJob", + operation_id="jobs_api_routes_fine_tuning_start_fine_tuning_job", oauth2_scopes=None, security_source=get_security_from_env( self.sdk_configuration.security, models.Security @@ -957,15 +946,17 @@ def start( ) if utils.match_response(http_res, "200", "application/json"): - return unmarshal_json_response(models.StartFineTuningJobResponse, http_res) + return unmarshal_json_response( + models.JobsAPIRoutesFineTuningStartFineTuningJobResponse, http_res + ) if utils.match_response(http_res, "4XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise models.SDKError("Unexpected response received", http_res) + raise errors.SDKError("Unexpected response received", http_res) async def start_async( self, @@ -975,7 +966,7 @@ async def start_async( server_url: Optional[str] = None, timeout_ms: Optional[int] = None, http_headers: Optional[Mapping[str, str]] = None, - ) -> models.StartFineTuningJobResponse: + ) -> models.JobsAPIRoutesFineTuningStartFineTuningJobResponse: r"""Start Fine Tuning Job Request the start of a validated fine tuning job. 
@@ -996,7 +987,7 @@ async def start_async( else: base_url = self._get_url(base_url, url_variables) - request = models.StartFineTuningJobRequest( + request = models.JobsAPIRoutesFineTuningStartFineTuningJobRequest( job_id=job_id, ) @@ -1029,7 +1020,7 @@ async def start_async( hook_ctx=HookContext( config=self.sdk_configuration, base_url=base_url or "", - operation_id="StartFineTuningJob", + operation_id="jobs_api_routes_fine_tuning_start_fine_tuning_job", oauth2_scopes=None, security_source=get_security_from_env( self.sdk_configuration.security, models.Security @@ -1041,12 +1032,14 @@ async def start_async( ) if utils.match_response(http_res, "200", "application/json"): - return unmarshal_json_response(models.StartFineTuningJobResponse, http_res) + return unmarshal_json_response( + models.JobsAPIRoutesFineTuningStartFineTuningJobResponse, http_res + ) if utils.match_response(http_res, "4XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise models.SDKError("Unexpected response received", http_res) + raise errors.SDKError("Unexpected response received", http_res) diff --git a/src/mistralai/client/libraries.py b/src/mistralai/client/libraries.py index 26ceabe1..b8728362 100644 --- a/src/mistralai/client/libraries.py +++ b/src/mistralai/client/libraries.py @@ -3,7 +3,7 @@ from .basesdk import BaseSDK from .sdkconfiguration import SDKConfiguration -from mistralai.client import models, utils +from mistralai.client import errors, models, utils from mistralai.client._hooks import HookContext from mistralai.client.accesses import Accesses from 
mistralai.client.documents import Documents @@ -39,7 +39,7 @@ def list( server_url: Optional[str] = None, timeout_ms: Optional[int] = None, http_headers: Optional[Mapping[str, str]] = None, - ) -> models.ListLibraryOut: + ) -> models.ListLibrariesResponse: r"""List all libraries you have access to. List all libraries that you have created or have been shared with you. @@ -87,7 +87,7 @@ def list( hook_ctx=HookContext( config=self.sdk_configuration, base_url=base_url or "", - operation_id="ListLibraries", + operation_id="libraries_list_v1", oauth2_scopes=None, security_source=get_security_from_env( self.sdk_configuration.security, models.Security @@ -99,15 +99,15 @@ def list( ) if utils.match_response(http_res, "200", "application/json"): - return unmarshal_json_response(models.ListLibraryOut, http_res) + return unmarshal_json_response(models.ListLibrariesResponse, http_res) if utils.match_response(http_res, "4XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise models.SDKError("Unexpected response received", http_res) + raise errors.SDKError("Unexpected response received", http_res) async def list_async( self, @@ -116,7 +116,7 @@ async def list_async( server_url: Optional[str] = None, timeout_ms: Optional[int] = None, http_headers: Optional[Mapping[str, str]] = None, - ) -> models.ListLibraryOut: + ) -> models.ListLibrariesResponse: r"""List all libraries you have access to. List all libraries that you have created or have been shared with you. 
@@ -164,7 +164,7 @@ async def list_async( hook_ctx=HookContext( config=self.sdk_configuration, base_url=base_url or "", - operation_id="ListLibraries", + operation_id="libraries_list_v1", oauth2_scopes=None, security_source=get_security_from_env( self.sdk_configuration.security, models.Security @@ -176,15 +176,15 @@ async def list_async( ) if utils.match_response(http_res, "200", "application/json"): - return unmarshal_json_response(models.ListLibraryOut, http_res) + return unmarshal_json_response(models.ListLibrariesResponse, http_res) if utils.match_response(http_res, "4XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise models.SDKError("Unexpected response received", http_res) + raise errors.SDKError("Unexpected response received", http_res) def create( self, @@ -196,7 +196,7 @@ def create( server_url: Optional[str] = None, timeout_ms: Optional[int] = None, http_headers: Optional[Mapping[str, str]] = None, - ) -> models.LibraryOut: + ) -> models.Library: r"""Create a new Library. Create a new Library, you will be marked as the owner and only you will have the possibility to share it with others. When first created this will only be accessible by you. 
@@ -219,7 +219,7 @@ def create( else: base_url = self._get_url(base_url, url_variables) - request = models.LibraryIn( + request = models.CreateLibraryRequest( name=name, description=description, chunk_size=chunk_size, @@ -239,7 +239,7 @@ def create( http_headers=http_headers, security=self.sdk_configuration.security, get_serialized_body=lambda: utils.serialize_request_body( - request, False, False, "json", models.LibraryIn + request, False, False, "json", models.CreateLibraryRequest ), allow_empty_value=None, timeout_ms=timeout_ms, @@ -257,7 +257,7 @@ def create( hook_ctx=HookContext( config=self.sdk_configuration, base_url=base_url or "", - operation_id="CreateLibrary", + operation_id="libraries_create_v1", oauth2_scopes=None, security_source=get_security_from_env( self.sdk_configuration.security, models.Security @@ -270,20 +270,20 @@ def create( response_data: Any = None if utils.match_response(http_res, "201", "application/json"): - return unmarshal_json_response(models.LibraryOut, http_res) + return unmarshal_json_response(models.Library, http_res) if utils.match_response(http_res, "422", "application/json"): response_data = unmarshal_json_response( - models.HTTPValidationErrorData, http_res + errors.HTTPValidationErrorData, http_res ) - raise models.HTTPValidationError(response_data, http_res) + raise errors.HTTPValidationError(response_data, http_res) if utils.match_response(http_res, "4XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise models.SDKError("Unexpected response received", http_res) + raise errors.SDKError("Unexpected response received", http_res) async def 
create_async( self, @@ -295,7 +295,7 @@ async def create_async( server_url: Optional[str] = None, timeout_ms: Optional[int] = None, http_headers: Optional[Mapping[str, str]] = None, - ) -> models.LibraryOut: + ) -> models.Library: r"""Create a new Library. Create a new Library, you will be marked as the owner and only you will have the possibility to share it with others. When first created this will only be accessible by you. @@ -318,7 +318,7 @@ async def create_async( else: base_url = self._get_url(base_url, url_variables) - request = models.LibraryIn( + request = models.CreateLibraryRequest( name=name, description=description, chunk_size=chunk_size, @@ -338,7 +338,7 @@ async def create_async( http_headers=http_headers, security=self.sdk_configuration.security, get_serialized_body=lambda: utils.serialize_request_body( - request, False, False, "json", models.LibraryIn + request, False, False, "json", models.CreateLibraryRequest ), allow_empty_value=None, timeout_ms=timeout_ms, @@ -356,7 +356,7 @@ async def create_async( hook_ctx=HookContext( config=self.sdk_configuration, base_url=base_url or "", - operation_id="CreateLibrary", + operation_id="libraries_create_v1", oauth2_scopes=None, security_source=get_security_from_env( self.sdk_configuration.security, models.Security @@ -369,20 +369,20 @@ async def create_async( response_data: Any = None if utils.match_response(http_res, "201", "application/json"): - return unmarshal_json_response(models.LibraryOut, http_res) + return unmarshal_json_response(models.Library, http_res) if utils.match_response(http_res, "422", "application/json"): response_data = unmarshal_json_response( - models.HTTPValidationErrorData, http_res + errors.HTTPValidationErrorData, http_res ) - raise models.HTTPValidationError(response_data, http_res) + raise errors.HTTPValidationError(response_data, http_res) if utils.match_response(http_res, "4XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error 
occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise models.SDKError("Unexpected response received", http_res) + raise errors.SDKError("Unexpected response received", http_res) def get( self, @@ -392,7 +392,7 @@ def get( server_url: Optional[str] = None, timeout_ms: Optional[int] = None, http_headers: Optional[Mapping[str, str]] = None, - ) -> models.LibraryOut: + ) -> models.Library: r"""Detailed information about a specific Library. Given a library id, details information about that Library. @@ -413,7 +413,7 @@ def get( else: base_url = self._get_url(base_url, url_variables) - request = models.GetLibraryRequest( + request = models.LibrariesGetV1Request( library_id=library_id, ) @@ -446,7 +446,7 @@ def get( hook_ctx=HookContext( config=self.sdk_configuration, base_url=base_url or "", - operation_id="GetLibrary", + operation_id="libraries_get_v1", oauth2_scopes=None, security_source=get_security_from_env( self.sdk_configuration.security, models.Security @@ -459,20 +459,20 @@ def get( response_data: Any = None if utils.match_response(http_res, "200", "application/json"): - return unmarshal_json_response(models.LibraryOut, http_res) + return unmarshal_json_response(models.Library, http_res) if utils.match_response(http_res, "422", "application/json"): response_data = unmarshal_json_response( - models.HTTPValidationErrorData, http_res + errors.HTTPValidationErrorData, http_res ) - raise models.HTTPValidationError(response_data, http_res) + raise errors.HTTPValidationError(response_data, http_res) if utils.match_response(http_res, "4XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + 
raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise models.SDKError("Unexpected response received", http_res) + raise errors.SDKError("Unexpected response received", http_res) async def get_async( self, @@ -482,7 +482,7 @@ async def get_async( server_url: Optional[str] = None, timeout_ms: Optional[int] = None, http_headers: Optional[Mapping[str, str]] = None, - ) -> models.LibraryOut: + ) -> models.Library: r"""Detailed information about a specific Library. Given a library id, details information about that Library. @@ -503,7 +503,7 @@ async def get_async( else: base_url = self._get_url(base_url, url_variables) - request = models.GetLibraryRequest( + request = models.LibrariesGetV1Request( library_id=library_id, ) @@ -536,7 +536,7 @@ async def get_async( hook_ctx=HookContext( config=self.sdk_configuration, base_url=base_url or "", - operation_id="GetLibrary", + operation_id="libraries_get_v1", oauth2_scopes=None, security_source=get_security_from_env( self.sdk_configuration.security, models.Security @@ -549,20 +549,20 @@ async def get_async( response_data: Any = None if utils.match_response(http_res, "200", "application/json"): - return unmarshal_json_response(models.LibraryOut, http_res) + return unmarshal_json_response(models.Library, http_res) if utils.match_response(http_res, "422", "application/json"): response_data = unmarshal_json_response( - models.HTTPValidationErrorData, http_res + errors.HTTPValidationErrorData, http_res ) - raise models.HTTPValidationError(response_data, http_res) + raise errors.HTTPValidationError(response_data, http_res) if utils.match_response(http_res, "4XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", 
http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise models.SDKError("Unexpected response received", http_res) + raise errors.SDKError("Unexpected response received", http_res) def delete( self, @@ -572,7 +572,7 @@ def delete( server_url: Optional[str] = None, timeout_ms: Optional[int] = None, http_headers: Optional[Mapping[str, str]] = None, - ) -> models.LibraryOut: + ) -> models.Library: r"""Delete a library and all of it's document. Given a library id, deletes it together with all documents that have been uploaded to that library. @@ -593,7 +593,7 @@ def delete( else: base_url = self._get_url(base_url, url_variables) - request = models.DeleteLibraryRequest( + request = models.LibrariesDeleteV1Request( library_id=library_id, ) @@ -626,7 +626,7 @@ def delete( hook_ctx=HookContext( config=self.sdk_configuration, base_url=base_url or "", - operation_id="DeleteLibrary", + operation_id="libraries_delete_v1", oauth2_scopes=None, security_source=get_security_from_env( self.sdk_configuration.security, models.Security @@ -639,20 +639,20 @@ def delete( response_data: Any = None if utils.match_response(http_res, "200", "application/json"): - return unmarshal_json_response(models.LibraryOut, http_res) + return unmarshal_json_response(models.Library, http_res) if utils.match_response(http_res, "422", "application/json"): response_data = unmarshal_json_response( - models.HTTPValidationErrorData, http_res + errors.HTTPValidationErrorData, http_res ) - raise models.HTTPValidationError(response_data, http_res) + raise errors.HTTPValidationError(response_data, http_res) if utils.match_response(http_res, "4XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise 
models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise models.SDKError("Unexpected response received", http_res) + raise errors.SDKError("Unexpected response received", http_res) async def delete_async( self, @@ -662,7 +662,7 @@ async def delete_async( server_url: Optional[str] = None, timeout_ms: Optional[int] = None, http_headers: Optional[Mapping[str, str]] = None, - ) -> models.LibraryOut: + ) -> models.Library: r"""Delete a library and all of it's document. Given a library id, deletes it together with all documents that have been uploaded to that library. @@ -683,7 +683,7 @@ async def delete_async( else: base_url = self._get_url(base_url, url_variables) - request = models.DeleteLibraryRequest( + request = models.LibrariesDeleteV1Request( library_id=library_id, ) @@ -716,7 +716,7 @@ async def delete_async( hook_ctx=HookContext( config=self.sdk_configuration, base_url=base_url or "", - operation_id="DeleteLibrary", + operation_id="libraries_delete_v1", oauth2_scopes=None, security_source=get_security_from_env( self.sdk_configuration.security, models.Security @@ -729,20 +729,20 @@ async def delete_async( response_data: Any = None if utils.match_response(http_res, "200", "application/json"): - return unmarshal_json_response(models.LibraryOut, http_res) + return unmarshal_json_response(models.Library, http_res) if utils.match_response(http_res, "422", "application/json"): response_data = unmarshal_json_response( - models.HTTPValidationErrorData, http_res + errors.HTTPValidationErrorData, http_res ) - raise models.HTTPValidationError(response_data, http_res) + raise errors.HTTPValidationError(response_data, http_res) if 
utils.match_response(http_res, "4XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise models.SDKError("Unexpected response received", http_res) + raise errors.SDKError("Unexpected response received", http_res) def update( self, @@ -754,7 +754,7 @@ def update( server_url: Optional[str] = None, timeout_ms: Optional[int] = None, http_headers: Optional[Mapping[str, str]] = None, - ) -> models.LibraryOut: + ) -> models.Library: r"""Update a library. Given a library id, you can update the name and description. @@ -777,9 +777,9 @@ def update( else: base_url = self._get_url(base_url, url_variables) - request = models.UpdateLibraryRequest( + request = models.LibrariesUpdateV1Request( library_id=library_id, - library_in_update=models.LibraryInUpdate( + update_library_request=models.UpdateLibraryRequest( name=name, description=description, ), @@ -799,7 +799,11 @@ def update( http_headers=http_headers, security=self.sdk_configuration.security, get_serialized_body=lambda: utils.serialize_request_body( - request.library_in_update, False, False, "json", models.LibraryInUpdate + request.update_library_request, + False, + False, + "json", + models.UpdateLibraryRequest, ), allow_empty_value=None, timeout_ms=timeout_ms, @@ -817,7 +821,7 @@ def update( hook_ctx=HookContext( config=self.sdk_configuration, base_url=base_url or "", - operation_id="UpdateLibrary", + operation_id="libraries_update_v1", oauth2_scopes=None, security_source=get_security_from_env( self.sdk_configuration.security, models.Security @@ -830,20 +834,20 @@ def update( response_data: Any = None if 
utils.match_response(http_res, "200", "application/json"): - return unmarshal_json_response(models.LibraryOut, http_res) + return unmarshal_json_response(models.Library, http_res) if utils.match_response(http_res, "422", "application/json"): response_data = unmarshal_json_response( - models.HTTPValidationErrorData, http_res + errors.HTTPValidationErrorData, http_res ) - raise models.HTTPValidationError(response_data, http_res) + raise errors.HTTPValidationError(response_data, http_res) if utils.match_response(http_res, "4XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise models.SDKError("Unexpected response received", http_res) + raise errors.SDKError("Unexpected response received", http_res) async def update_async( self, @@ -855,7 +859,7 @@ async def update_async( server_url: Optional[str] = None, timeout_ms: Optional[int] = None, http_headers: Optional[Mapping[str, str]] = None, - ) -> models.LibraryOut: + ) -> models.Library: r"""Update a library. Given a library id, you can update the name and description. 
@@ -878,9 +882,9 @@ async def update_async( else: base_url = self._get_url(base_url, url_variables) - request = models.UpdateLibraryRequest( + request = models.LibrariesUpdateV1Request( library_id=library_id, - library_in_update=models.LibraryInUpdate( + update_library_request=models.UpdateLibraryRequest( name=name, description=description, ), @@ -900,7 +904,11 @@ async def update_async( http_headers=http_headers, security=self.sdk_configuration.security, get_serialized_body=lambda: utils.serialize_request_body( - request.library_in_update, False, False, "json", models.LibraryInUpdate + request.update_library_request, + False, + False, + "json", + models.UpdateLibraryRequest, ), allow_empty_value=None, timeout_ms=timeout_ms, @@ -918,7 +926,7 @@ async def update_async( hook_ctx=HookContext( config=self.sdk_configuration, base_url=base_url or "", - operation_id="UpdateLibrary", + operation_id="libraries_update_v1", oauth2_scopes=None, security_source=get_security_from_env( self.sdk_configuration.security, models.Security @@ -931,17 +939,17 @@ async def update_async( response_data: Any = None if utils.match_response(http_res, "200", "application/json"): - return unmarshal_json_response(models.LibraryOut, http_res) + return unmarshal_json_response(models.Library, http_res) if utils.match_response(http_res, "422", "application/json"): response_data = unmarshal_json_response( - models.HTTPValidationErrorData, http_res + errors.HTTPValidationErrorData, http_res ) - raise models.HTTPValidationError(response_data, http_res) + raise errors.HTTPValidationError(response_data, http_res) if utils.match_response(http_res, "4XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error 
occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise models.SDKError("Unexpected response received", http_res) + raise errors.SDKError("Unexpected response received", http_res) diff --git a/src/mistralai/client/models/__init__.py b/src/mistralai/client/models/__init__.py index 093ffcbd..5ef8b3f3 100644 --- a/src/mistralai/client/models/__init__.py +++ b/src/mistralai/client/models/__init__.py @@ -1,42 +1,116 @@ """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" # @generated-id: e0e8dad92725 -from .mistralerror import MistralError -from typing import TYPE_CHECKING -from importlib import import_module -import builtins -import sys +from typing import Any, TYPE_CHECKING + +from mistralai.client.utils.dynamic_imports import lazy_getattr, lazy_dir if TYPE_CHECKING: - from .agent import Agent, AgentObject, AgentTool, AgentToolTypedDict, AgentTypedDict + from .agent import ( + Agent, + AgentTool, + AgentToolTypedDict, + AgentTypedDict, + UnknownAgentTool, + ) from .agentaliasresponse import AgentAliasResponse, AgentAliasResponseTypedDict from .agentconversation import ( AgentConversation, AgentConversationAgentVersion, AgentConversationAgentVersionTypedDict, - AgentConversationObject, AgentConversationTypedDict, ) - from .agentcreationrequest import ( - AgentCreationRequest, - AgentCreationRequestTool, - AgentCreationRequestToolTypedDict, - AgentCreationRequestTypedDict, - ) from .agenthandoffdoneevent import ( AgentHandoffDoneEvent, AgentHandoffDoneEventTypedDict, ) - from .agenthandoffentry import ( - AgentHandoffEntry, - AgentHandoffEntryObject, - AgentHandoffEntryType, - AgentHandoffEntryTypedDict, - ) + from .agenthandoffentry import AgentHandoffEntry, AgentHandoffEntryTypedDict from .agenthandoffstartedevent import ( AgentHandoffStartedEvent, AgentHandoffStartedEventTypedDict, ) + from .agents_api_v1_agents_create_or_update_aliasop import ( + 
AgentsAPIV1AgentsCreateOrUpdateAliasRequest, + AgentsAPIV1AgentsCreateOrUpdateAliasRequestTypedDict, + ) + from .agents_api_v1_agents_delete_aliasop import ( + AgentsAPIV1AgentsDeleteAliasRequest, + AgentsAPIV1AgentsDeleteAliasRequestTypedDict, + ) + from .agents_api_v1_agents_deleteop import ( + AgentsAPIV1AgentsDeleteRequest, + AgentsAPIV1AgentsDeleteRequestTypedDict, + ) + from .agents_api_v1_agents_get_versionop import ( + AgentsAPIV1AgentsGetVersionRequest, + AgentsAPIV1AgentsGetVersionRequestTypedDict, + ) + from .agents_api_v1_agents_getop import ( + AgentsAPIV1AgentsGetAgentVersion, + AgentsAPIV1AgentsGetAgentVersionTypedDict, + AgentsAPIV1AgentsGetRequest, + AgentsAPIV1AgentsGetRequestTypedDict, + ) + from .agents_api_v1_agents_list_version_aliasesop import ( + AgentsAPIV1AgentsListVersionAliasesRequest, + AgentsAPIV1AgentsListVersionAliasesRequestTypedDict, + ) + from .agents_api_v1_agents_list_versionsop import ( + AgentsAPIV1AgentsListVersionsRequest, + AgentsAPIV1AgentsListVersionsRequestTypedDict, + ) + from .agents_api_v1_agents_listop import ( + AgentsAPIV1AgentsListRequest, + AgentsAPIV1AgentsListRequestTypedDict, + ) + from .agents_api_v1_agents_update_versionop import ( + AgentsAPIV1AgentsUpdateVersionRequest, + AgentsAPIV1AgentsUpdateVersionRequestTypedDict, + ) + from .agents_api_v1_agents_updateop import ( + AgentsAPIV1AgentsUpdateRequest, + AgentsAPIV1AgentsUpdateRequestTypedDict, + ) + from .agents_api_v1_conversations_append_streamop import ( + AgentsAPIV1ConversationsAppendStreamRequest, + AgentsAPIV1ConversationsAppendStreamRequestTypedDict, + ) + from .agents_api_v1_conversations_appendop import ( + AgentsAPIV1ConversationsAppendRequest, + AgentsAPIV1ConversationsAppendRequestTypedDict, + ) + from .agents_api_v1_conversations_deleteop import ( + AgentsAPIV1ConversationsDeleteRequest, + AgentsAPIV1ConversationsDeleteRequestTypedDict, + ) + from .agents_api_v1_conversations_getop import ( + AgentsAPIV1ConversationsGetRequest, + 
AgentsAPIV1ConversationsGetRequestTypedDict, + ResponseV1ConversationsGet, + ResponseV1ConversationsGetTypedDict, + ) + from .agents_api_v1_conversations_historyop import ( + AgentsAPIV1ConversationsHistoryRequest, + AgentsAPIV1ConversationsHistoryRequestTypedDict, + ) + from .agents_api_v1_conversations_listop import ( + AgentsAPIV1ConversationsListRequest, + AgentsAPIV1ConversationsListRequestTypedDict, + AgentsAPIV1ConversationsListResponse, + AgentsAPIV1ConversationsListResponseTypedDict, + ) + from .agents_api_v1_conversations_messagesop import ( + AgentsAPIV1ConversationsMessagesRequest, + AgentsAPIV1ConversationsMessagesRequestTypedDict, + ) + from .agents_api_v1_conversations_restart_streamop import ( + AgentsAPIV1ConversationsRestartStreamRequest, + AgentsAPIV1ConversationsRestartStreamRequestTypedDict, + ) + from .agents_api_v1_conversations_restartop import ( + AgentsAPIV1ConversationsRestartRequest, + AgentsAPIV1ConversationsRestartRequestTypedDict, + ) from .agentscompletionrequest import ( AgentsCompletionRequest, AgentsCompletionRequestMessage, @@ -57,28 +131,15 @@ AgentsCompletionStreamRequestToolChoiceTypedDict, AgentsCompletionStreamRequestTypedDict, ) - from .agentupdaterequest import ( - AgentUpdateRequest, - AgentUpdateRequestTool, - AgentUpdateRequestToolTypedDict, - AgentUpdateRequestTypedDict, - ) from .apiendpoint import APIEndpoint - from .appendconversationop import ( - AppendConversationRequest, - AppendConversationRequestTypedDict, - ) - from .appendconversationstreamop import ( - AppendConversationStreamRequest, - AppendConversationStreamRequestTypedDict, + from .archivemodelresponse import ( + ArchiveModelResponse, + ArchiveModelResponseTypedDict, ) - from .archiveftmodelout import ArchiveFTModelOut, ArchiveFTModelOutTypedDict - from .archivemodelop import ArchiveModelRequest, ArchiveModelRequestTypedDict from .assistantmessage import ( AssistantMessage, AssistantMessageContent, AssistantMessageContentTypedDict, - 
AssistantMessageRole, AssistantMessageTypedDict, ) from .audiochunk import AudioChunk, AudioChunkTypedDict @@ -94,19 +155,10 @@ ) from .basemodelcard import BaseModelCard, BaseModelCardTypedDict from .batcherror import BatchError, BatchErrorTypedDict - from .batchjobin import BatchJobIn, BatchJobInTypedDict - from .batchjobout import BatchJobOut, BatchJobOutTypedDict - from .batchjobsout import BatchJobsOut, BatchJobsOutTypedDict + from .batchjob import BatchJob, BatchJobTypedDict from .batchjobstatus import BatchJobStatus from .batchrequest import BatchRequest, BatchRequestTypedDict from .builtinconnectors import BuiltInConnectors - from .cancelbatchjobop import CancelBatchJobRequest, CancelBatchJobRequestTypedDict - from .cancelfinetuningjobop import ( - CancelFineTuningJobRequest, - CancelFineTuningJobRequestTypedDict, - CancelFineTuningJobResponse, - CancelFineTuningJobResponseTypedDict, - ) from .chatclassificationrequest import ( ChatClassificationRequest, ChatClassificationRequestTypedDict, @@ -150,7 +202,7 @@ ChatModerationRequestInputs3TypedDict, ChatModerationRequestTypedDict, ) - from .checkpointout import CheckpointOut, CheckpointOutTypedDict + from .checkpoint import Checkpoint, CheckpointTypedDict from .classificationrequest import ( ClassificationRequest, ClassificationRequestInputs, @@ -165,60 +217,65 @@ ClassificationTargetResult, ClassificationTargetResultTypedDict, ) - from .classifierdetailedjobout import ( - ClassifierDetailedJobOut, - ClassifierDetailedJobOutIntegration, - ClassifierDetailedJobOutIntegrationTypedDict, - ClassifierDetailedJobOutStatus, - ClassifierDetailedJobOutTypedDict, - ) - from .classifierftmodelout import ( - ClassifierFTModelOut, - ClassifierFTModelOutTypedDict, - ) - from .classifierjobout import ( - ClassifierJobOut, - ClassifierJobOutIntegration, - ClassifierJobOutIntegrationTypedDict, - ClassifierJobOutStatus, - ClassifierJobOutTypedDict, - ) - from .classifiertargetin import ClassifierTargetIn, 
ClassifierTargetInTypedDict - from .classifiertargetout import ClassifierTargetOut, ClassifierTargetOutTypedDict + from .classifierfinetunedmodel import ( + ClassifierFineTunedModel, + ClassifierFineTunedModelTypedDict, + ) + from .classifierfinetuningjob import ( + ClassifierFineTuningJob, + ClassifierFineTuningJobIntegration, + ClassifierFineTuningJobIntegrationTypedDict, + ClassifierFineTuningJobStatus, + ClassifierFineTuningJobTypedDict, + UnknownClassifierFineTuningJobIntegration, + ) + from .classifierfinetuningjobdetails import ( + ClassifierFineTuningJobDetails, + ClassifierFineTuningJobDetailsIntegration, + ClassifierFineTuningJobDetailsIntegrationTypedDict, + ClassifierFineTuningJobDetailsStatus, + ClassifierFineTuningJobDetailsTypedDict, + UnknownClassifierFineTuningJobDetailsIntegration, + ) + from .classifiertarget import ClassifierTarget, ClassifierTargetTypedDict + from .classifiertargetresult import ( + ClassifierTargetResult, + ClassifierTargetResultTypedDict, + ) from .classifiertrainingparameters import ( ClassifierTrainingParameters, ClassifierTrainingParametersTypedDict, ) - from .classifiertrainingparametersin import ( - ClassifierTrainingParametersIn, - ClassifierTrainingParametersInTypedDict, - ) from .codeinterpretertool import CodeInterpreterTool, CodeInterpreterToolTypedDict from .completionargs import CompletionArgs, CompletionArgsTypedDict from .completionargsstop import CompletionArgsStop, CompletionArgsStopTypedDict from .completionchunk import CompletionChunk, CompletionChunkTypedDict - from .completiondetailedjobout import ( - CompletionDetailedJobOut, - CompletionDetailedJobOutIntegration, - CompletionDetailedJobOutIntegrationTypedDict, - CompletionDetailedJobOutRepository, - CompletionDetailedJobOutRepositoryTypedDict, - CompletionDetailedJobOutStatus, - CompletionDetailedJobOutTypedDict, - ) from .completionevent import CompletionEvent, CompletionEventTypedDict - from .completionftmodelout import ( - CompletionFTModelOut, - 
CompletionFTModelOutTypedDict, - ) - from .completionjobout import ( - CompletionJobOut, - CompletionJobOutIntegration, - CompletionJobOutIntegrationTypedDict, - CompletionJobOutRepository, - CompletionJobOutRepositoryTypedDict, - CompletionJobOutStatus, - CompletionJobOutTypedDict, + from .completionfinetunedmodel import ( + CompletionFineTunedModel, + CompletionFineTunedModelTypedDict, + ) + from .completionfinetuningjob import ( + CompletionFineTuningJob, + CompletionFineTuningJobIntegration, + CompletionFineTuningJobIntegrationTypedDict, + CompletionFineTuningJobRepository, + CompletionFineTuningJobRepositoryTypedDict, + CompletionFineTuningJobStatus, + CompletionFineTuningJobTypedDict, + UnknownCompletionFineTuningJobIntegration, + UnknownCompletionFineTuningJobRepository, + ) + from .completionfinetuningjobdetails import ( + CompletionFineTuningJobDetails, + CompletionFineTuningJobDetailsIntegration, + CompletionFineTuningJobDetailsIntegrationTypedDict, + CompletionFineTuningJobDetailsRepository, + CompletionFineTuningJobDetailsRepositoryTypedDict, + CompletionFineTuningJobDetailsStatus, + CompletionFineTuningJobDetailsTypedDict, + UnknownCompletionFineTuningJobDetailsIntegration, + UnknownCompletionFineTuningJobDetailsRepository, ) from .completionresponsestreamchoice import ( CompletionResponseStreamChoice, @@ -229,11 +286,7 @@ CompletionTrainingParameters, CompletionTrainingParametersTypedDict, ) - from .completiontrainingparametersin import ( - CompletionTrainingParametersIn, - CompletionTrainingParametersInTypedDict, - ) - from .contentchunk import ContentChunk, ContentChunkTypedDict + from .contentchunk import ContentChunk, ContentChunkTypedDict, UnknownContentChunk from .conversationappendrequest import ( ConversationAppendRequest, ConversationAppendRequestHandoffExecution, @@ -249,10 +302,10 @@ ConversationEventsData, ConversationEventsDataTypedDict, ConversationEventsTypedDict, + UnknownConversationEventsData, ) from .conversationhistory import ( 
ConversationHistory, - ConversationHistoryObject, ConversationHistoryTypedDict, Entry, EntryTypedDict, @@ -260,7 +313,6 @@ from .conversationinputs import ConversationInputs, ConversationInputsTypedDict from .conversationmessages import ( ConversationMessages, - ConversationMessagesObject, ConversationMessagesTypedDict, ) from .conversationrequest import ( @@ -274,7 +326,6 @@ ) from .conversationresponse import ( ConversationResponse, - ConversationResponseObject, ConversationResponseTypedDict, Output, OutputTypedDict, @@ -302,38 +353,46 @@ ConversationStreamRequestToolTypedDict, ConversationStreamRequestTypedDict, ) + from .conversationthinkchunk import ( + ConversationThinkChunk, + ConversationThinkChunkThinking, + ConversationThinkChunkThinkingTypedDict, + ConversationThinkChunkTypedDict, + ) from .conversationusageinfo import ( ConversationUsageInfo, ConversationUsageInfoTypedDict, ) - from .createfinetuningjobop import ( - CreateFineTuningJobResponse, - CreateFineTuningJobResponseTypedDict, - Response, - ResponseTypedDict, - ) - from .createorupdateagentaliasop import ( - CreateOrUpdateAgentAliasRequest, - CreateOrUpdateAgentAliasRequestTypedDict, - ) - from .deleteagentaliasop import ( - DeleteAgentAliasRequest, - DeleteAgentAliasRequestTypedDict, + from .createagentrequest import ( + CreateAgentRequest, + CreateAgentRequestTool, + CreateAgentRequestToolTypedDict, + CreateAgentRequestTypedDict, + ) + from .createbatchjobrequest import ( + CreateBatchJobRequest, + CreateBatchJobRequestTypedDict, + ) + from .createfileresponse import CreateFileResponse, CreateFileResponseTypedDict + from .createfinetuningjobrequest import ( + CreateFineTuningJobRequest, + CreateFineTuningJobRequestIntegration, + CreateFineTuningJobRequestIntegrationTypedDict, + CreateFineTuningJobRequestRepository, + CreateFineTuningJobRequestRepositoryTypedDict, + CreateFineTuningJobRequestTypedDict, + Hyperparameters, + HyperparametersTypedDict, ) - from .deleteagentop import 
DeleteAgentRequest, DeleteAgentRequestTypedDict - from .deleteconversationop import ( - DeleteConversationRequest, - DeleteConversationRequestTypedDict, + from .createlibraryrequest import ( + CreateLibraryRequest, + CreateLibraryRequestTypedDict, ) - from .deletedocumentop import DeleteDocumentRequest, DeleteDocumentRequestTypedDict - from .deletefileop import DeleteFileRequest, DeleteFileRequestTypedDict - from .deletefileout import DeleteFileOut, DeleteFileOutTypedDict - from .deletelibraryaccessop import ( - DeleteLibraryAccessRequest, - DeleteLibraryAccessRequestTypedDict, + from .delete_model_v1_models_model_id_deleteop import ( + DeleteModelV1ModelsModelIDDeleteRequest, + DeleteModelV1ModelsModelIDDeleteRequestTypedDict, ) - from .deletelibraryop import DeleteLibraryRequest, DeleteLibraryRequestTypedDict - from .deletemodelop import DeleteModelRequest, DeleteModelRequestTypedDict + from .deletefileresponse import DeleteFileResponse, DeleteFileResponseTypedDict from .deletemodelout import DeleteModelOut, DeleteModelOutTypedDict from .deltamessage import ( DeltaMessage, @@ -341,21 +400,10 @@ DeltaMessageContentTypedDict, DeltaMessageTypedDict, ) + from .document import Document, DocumentTypedDict from .documentlibrarytool import DocumentLibraryTool, DocumentLibraryToolTypedDict - from .documentout import DocumentOut, DocumentOutTypedDict from .documenttextcontent import DocumentTextContent, DocumentTextContentTypedDict - from .documentupdatein import ( - Attributes, - AttributesTypedDict, - DocumentUpdateIn, - DocumentUpdateInTypedDict, - ) - from .documenturlchunk import ( - DocumentURLChunk, - DocumentURLChunkType, - DocumentURLChunkTypedDict, - ) - from .downloadfileop import DownloadFileRequest, DownloadFileRequestTypedDict + from .documenturlchunk import DocumentURLChunk, DocumentURLChunkTypedDict from .embeddingdtype import EmbeddingDtype from .embeddingrequest import ( EmbeddingRequest, @@ -370,12 +418,35 @@ ) from .encodingformat import EncodingFormat 
from .entitytype import EntityType - from .eventout import EventOut, EventOutTypedDict + from .event import Event, EventTypedDict from .file import File, FileTypedDict from .filechunk import FileChunk, FileChunkTypedDict from .filepurpose import FilePurpose + from .files_api_routes_delete_fileop import ( + FilesAPIRoutesDeleteFileRequest, + FilesAPIRoutesDeleteFileRequestTypedDict, + ) + from .files_api_routes_download_fileop import ( + FilesAPIRoutesDownloadFileRequest, + FilesAPIRoutesDownloadFileRequestTypedDict, + ) + from .files_api_routes_get_signed_urlop import ( + FilesAPIRoutesGetSignedURLRequest, + FilesAPIRoutesGetSignedURLRequestTypedDict, + ) + from .files_api_routes_list_filesop import ( + FilesAPIRoutesListFilesRequest, + FilesAPIRoutesListFilesRequestTypedDict, + ) + from .files_api_routes_retrieve_fileop import ( + FilesAPIRoutesRetrieveFileRequest, + FilesAPIRoutesRetrieveFileRequestTypedDict, + ) + from .files_api_routes_upload_fileop import ( + MultiPartBodyParams, + MultiPartBodyParamsTypedDict, + ) from .fileschema import FileSchema, FileSchemaTypedDict - from .filesignedurl import FileSignedURL, FileSignedURLTypedDict from .fimcompletionrequest import ( FIMCompletionRequest, FIMCompletionRequestStop, @@ -393,11 +464,11 @@ FIMCompletionStreamRequestTypedDict, ) from .finetuneablemodeltype import FineTuneableModelType - from .ftclassifierlossfunction import FTClassifierLossFunction - from .ftmodelcapabilitiesout import ( - FTModelCapabilitiesOut, - FTModelCapabilitiesOutTypedDict, + from .finetunedmodelcapabilities import ( + FineTunedModelCapabilities, + FineTunedModelCapabilitiesTypedDict, ) + from .ftclassifierlossfunction import FTClassifierLossFunction from .ftmodelcard import FTModelCard, FTModelCardTypedDict from .function import Function, FunctionTypedDict from .functioncall import ( @@ -408,157 +479,196 @@ ) from .functioncallentry import ( FunctionCallEntry, - FunctionCallEntryObject, - FunctionCallEntryType, + 
FunctionCallEntryConfirmationStatus, FunctionCallEntryTypedDict, ) from .functioncallentryarguments import ( FunctionCallEntryArguments, FunctionCallEntryArgumentsTypedDict, ) - from .functioncallevent import FunctionCallEvent, FunctionCallEventTypedDict - from .functionname import FunctionName, FunctionNameTypedDict - from .functionresultentry import ( - FunctionResultEntry, - FunctionResultEntryObject, - FunctionResultEntryType, - FunctionResultEntryTypedDict, + from .functioncallevent import ( + FunctionCallEvent, + FunctionCallEventConfirmationStatus, + FunctionCallEventTypedDict, ) + from .functionname import FunctionName, FunctionNameTypedDict + from .functionresultentry import FunctionResultEntry, FunctionResultEntryTypedDict from .functiontool import FunctionTool, FunctionToolTypedDict - from .getagentop import ( - GetAgentAgentVersion, - GetAgentAgentVersionTypedDict, - GetAgentRequest, - GetAgentRequestTypedDict, - ) - from .getagentversionop import ( - GetAgentVersionRequest, - GetAgentVersionRequestTypedDict, - ) - from .getbatchjobop import GetBatchJobRequest, GetBatchJobRequestTypedDict - from .getconversationhistoryop import ( - GetConversationHistoryRequest, - GetConversationHistoryRequestTypedDict, - ) - from .getconversationmessagesop import ( - GetConversationMessagesRequest, - GetConversationMessagesRequestTypedDict, - ) - from .getconversationop import ( - GetConversationRequest, - GetConversationRequestTypedDict, - ResponseV1ConversationsGet, - ResponseV1ConversationsGetTypedDict, - ) - from .getdocumentextractedtextsignedurlop import ( - GetDocumentExtractedTextSignedURLRequest, - GetDocumentExtractedTextSignedURLRequestTypedDict, - ) - from .getdocumentop import GetDocumentRequest, GetDocumentRequestTypedDict - from .getdocumentsignedurlop import ( - GetDocumentSignedURLRequest, - GetDocumentSignedURLRequestTypedDict, - ) - from .getdocumentstatusop import ( - GetDocumentStatusRequest, - GetDocumentStatusRequestTypedDict, - ) - from 
.getdocumenttextcontentop import ( - GetDocumentTextContentRequest, - GetDocumentTextContentRequestTypedDict, - ) - from .getfilesignedurlop import ( - GetFileSignedURLRequest, - GetFileSignedURLRequestTypedDict, + from .getfileresponse import GetFileResponse, GetFileResponseTypedDict + from .getsignedurlresponse import ( + GetSignedURLResponse, + GetSignedURLResponseTypedDict, ) - from .getfinetuningjobop import ( - GetFineTuningJobRequest, - GetFineTuningJobRequestTypedDict, - GetFineTuningJobResponse, - GetFineTuningJobResponseTypedDict, - ) - from .getlibraryop import GetLibraryRequest, GetLibraryRequestTypedDict + from .githubrepository import GithubRepository, GithubRepositoryTypedDict from .githubrepositoryin import GithubRepositoryIn, GithubRepositoryInTypedDict - from .githubrepositoryout import GithubRepositoryOut, GithubRepositoryOutTypedDict - from .httpvalidationerror import HTTPValidationError, HTTPValidationErrorData + from .imagedetail import ImageDetail from .imagegenerationtool import ImageGenerationTool, ImageGenerationToolTypedDict from .imageurl import ImageURL, ImageURLTypedDict from .imageurlchunk import ( ImageURLChunk, - ImageURLChunkType, ImageURLChunkTypedDict, ImageURLUnion, ImageURLUnionTypedDict, ) from .inputentries import InputEntries, InputEntriesTypedDict - from .inputs import ( - Inputs, - InputsMessage, - InputsMessageTypedDict, - InputsTypedDict, - InstructRequestInputs, - InstructRequestInputsTypedDict, - ) + from .inputs import Inputs, InputsTypedDict from .instructrequest import ( InstructRequest, InstructRequestMessage, InstructRequestMessageTypedDict, InstructRequestTypedDict, ) - from .jobin import ( - Hyperparameters, - HyperparametersTypedDict, - JobIn, - JobInIntegration, - JobInIntegrationTypedDict, - JobInRepository, - JobInRepositoryTypedDict, - JobInTypedDict, - ) - from .jobmetadataout import JobMetadataOut, JobMetadataOutTypedDict - from .jobsout import JobsOut, JobsOutData, JobsOutDataTypedDict, JobsOutTypedDict 
- from .jsonschema import JSONSchema, JSONSchemaTypedDict - from .legacyjobmetadataout import ( - LegacyJobMetadataOut, - LegacyJobMetadataOutTypedDict, - ) - from .libraryin import LibraryIn, LibraryInTypedDict - from .libraryinupdate import LibraryInUpdate, LibraryInUpdateTypedDict - from .libraryout import LibraryOut, LibraryOutTypedDict - from .listagentaliasesop import ( - ListAgentAliasesRequest, - ListAgentAliasesRequestTypedDict, - ) - from .listagentsop import ListAgentsRequest, ListAgentsRequestTypedDict - from .listagentversionsop import ( - ListAgentVersionsRequest, - ListAgentVersionsRequestTypedDict, - ) - from .listbatchjobsop import ( - ListBatchJobsRequest, - ListBatchJobsRequestTypedDict, + from .jobmetadata import JobMetadata, JobMetadataTypedDict + from .jobs_api_routes_batch_cancel_batch_jobop import ( + JobsAPIRoutesBatchCancelBatchJobRequest, + JobsAPIRoutesBatchCancelBatchJobRequestTypedDict, + ) + from .jobs_api_routes_batch_get_batch_jobop import ( + JobsAPIRoutesBatchGetBatchJobRequest, + JobsAPIRoutesBatchGetBatchJobRequestTypedDict, + ) + from .jobs_api_routes_batch_get_batch_jobsop import ( + JobsAPIRoutesBatchGetBatchJobsRequest, + JobsAPIRoutesBatchGetBatchJobsRequestTypedDict, OrderBy, ) - from .listconversationsop import ( - ListConversationsRequest, - ListConversationsRequestTypedDict, - ListConversationsResponse, - ListConversationsResponseTypedDict, - ) - from .listdocumentout import ListDocumentOut, ListDocumentOutTypedDict - from .listdocumentsop import ListDocumentsRequest, ListDocumentsRequestTypedDict - from .listfilesop import ListFilesRequest, ListFilesRequestTypedDict - from .listfilesout import ListFilesOut, ListFilesOutTypedDict - from .listfinetuningjobsop import ( - ListFineTuningJobsRequest, - ListFineTuningJobsRequestTypedDict, - ListFineTuningJobsStatus, - ) - from .listlibraryaccessesop import ( - ListLibraryAccessesRequest, - ListLibraryAccessesRequestTypedDict, - ) - from .listlibraryout import ListLibraryOut, 
ListLibraryOutTypedDict + from .jobs_api_routes_fine_tuning_archive_fine_tuned_modelop import ( + JobsAPIRoutesFineTuningArchiveFineTunedModelRequest, + JobsAPIRoutesFineTuningArchiveFineTunedModelRequestTypedDict, + ) + from .jobs_api_routes_fine_tuning_cancel_fine_tuning_jobop import ( + JobsAPIRoutesFineTuningCancelFineTuningJobRequest, + JobsAPIRoutesFineTuningCancelFineTuningJobRequestTypedDict, + JobsAPIRoutesFineTuningCancelFineTuningJobResponse, + JobsAPIRoutesFineTuningCancelFineTuningJobResponseTypedDict, + UnknownJobsAPIRoutesFineTuningCancelFineTuningJobResponse, + ) + from .jobs_api_routes_fine_tuning_create_fine_tuning_jobop import ( + JobsAPIRoutesFineTuningCreateFineTuningJobResponse, + JobsAPIRoutesFineTuningCreateFineTuningJobResponseTypedDict, + Response, + ResponseTypedDict, + UnknownResponse, + ) + from .jobs_api_routes_fine_tuning_get_fine_tuning_jobop import ( + JobsAPIRoutesFineTuningGetFineTuningJobRequest, + JobsAPIRoutesFineTuningGetFineTuningJobRequestTypedDict, + JobsAPIRoutesFineTuningGetFineTuningJobResponse, + JobsAPIRoutesFineTuningGetFineTuningJobResponseTypedDict, + UnknownJobsAPIRoutesFineTuningGetFineTuningJobResponse, + ) + from .jobs_api_routes_fine_tuning_get_fine_tuning_jobsop import ( + JobsAPIRoutesFineTuningGetFineTuningJobsRequest, + JobsAPIRoutesFineTuningGetFineTuningJobsRequestTypedDict, + JobsAPIRoutesFineTuningGetFineTuningJobsStatus, + ) + from .jobs_api_routes_fine_tuning_start_fine_tuning_jobop import ( + JobsAPIRoutesFineTuningStartFineTuningJobRequest, + JobsAPIRoutesFineTuningStartFineTuningJobRequestTypedDict, + JobsAPIRoutesFineTuningStartFineTuningJobResponse, + JobsAPIRoutesFineTuningStartFineTuningJobResponseTypedDict, + UnknownJobsAPIRoutesFineTuningStartFineTuningJobResponse, + ) + from .jobs_api_routes_fine_tuning_unarchive_fine_tuned_modelop import ( + JobsAPIRoutesFineTuningUnarchiveFineTunedModelRequest, + JobsAPIRoutesFineTuningUnarchiveFineTunedModelRequestTypedDict, + ) + from 
.jobs_api_routes_fine_tuning_update_fine_tuned_modelop import ( + JobsAPIRoutesFineTuningUpdateFineTunedModelRequest, + JobsAPIRoutesFineTuningUpdateFineTunedModelRequestTypedDict, + JobsAPIRoutesFineTuningUpdateFineTunedModelResponse, + JobsAPIRoutesFineTuningUpdateFineTunedModelResponseTypedDict, + UnknownJobsAPIRoutesFineTuningUpdateFineTunedModelResponse, + ) + from .jsonschema import JSONSchema, JSONSchemaTypedDict + from .legacyjobmetadata import LegacyJobMetadata, LegacyJobMetadataTypedDict + from .libraries_delete_v1op import ( + LibrariesDeleteV1Request, + LibrariesDeleteV1RequestTypedDict, + ) + from .libraries_documents_delete_v1op import ( + LibrariesDocumentsDeleteV1Request, + LibrariesDocumentsDeleteV1RequestTypedDict, + ) + from .libraries_documents_get_extracted_text_signed_url_v1op import ( + LibrariesDocumentsGetExtractedTextSignedURLV1Request, + LibrariesDocumentsGetExtractedTextSignedURLV1RequestTypedDict, + ) + from .libraries_documents_get_signed_url_v1op import ( + LibrariesDocumentsGetSignedURLV1Request, + LibrariesDocumentsGetSignedURLV1RequestTypedDict, + ) + from .libraries_documents_get_status_v1op import ( + LibrariesDocumentsGetStatusV1Request, + LibrariesDocumentsGetStatusV1RequestTypedDict, + ) + from .libraries_documents_get_text_content_v1op import ( + LibrariesDocumentsGetTextContentV1Request, + LibrariesDocumentsGetTextContentV1RequestTypedDict, + ) + from .libraries_documents_get_v1op import ( + LibrariesDocumentsGetV1Request, + LibrariesDocumentsGetV1RequestTypedDict, + ) + from .libraries_documents_list_v1op import ( + LibrariesDocumentsListV1Request, + LibrariesDocumentsListV1RequestTypedDict, + ) + from .libraries_documents_reprocess_v1op import ( + LibrariesDocumentsReprocessV1Request, + LibrariesDocumentsReprocessV1RequestTypedDict, + ) + from .libraries_documents_update_v1op import ( + LibrariesDocumentsUpdateV1Request, + LibrariesDocumentsUpdateV1RequestTypedDict, + ) + from .libraries_documents_upload_v1op import ( + 
DocumentUpload, + DocumentUploadTypedDict, + LibrariesDocumentsUploadV1Request, + LibrariesDocumentsUploadV1RequestTypedDict, + ) + from .libraries_get_v1op import ( + LibrariesGetV1Request, + LibrariesGetV1RequestTypedDict, + ) + from .libraries_share_create_v1op import ( + LibrariesShareCreateV1Request, + LibrariesShareCreateV1RequestTypedDict, + ) + from .libraries_share_delete_v1op import ( + LibrariesShareDeleteV1Request, + LibrariesShareDeleteV1RequestTypedDict, + ) + from .libraries_share_list_v1op import ( + LibrariesShareListV1Request, + LibrariesShareListV1RequestTypedDict, + ) + from .libraries_update_v1op import ( + LibrariesUpdateV1Request, + LibrariesUpdateV1RequestTypedDict, + ) + from .library import Library, LibraryTypedDict + from .listbatchjobsresponse import ( + ListBatchJobsResponse, + ListBatchJobsResponseTypedDict, + ) + from .listdocumentsresponse import ( + ListDocumentsResponse, + ListDocumentsResponseTypedDict, + ) + from .listfilesresponse import ListFilesResponse, ListFilesResponseTypedDict + from .listfinetuningjobsresponse import ( + ListFineTuningJobsResponse, + ListFineTuningJobsResponseData, + ListFineTuningJobsResponseDataTypedDict, + ListFineTuningJobsResponseTypedDict, + UnknownListFineTuningJobsResponseData, + ) + from .listlibrariesresponse import ( + ListLibrariesResponse, + ListLibrariesResponseTypedDict, + ) from .listsharingout import ListSharingOut, ListSharingOutTypedDict from .messageentries import MessageEntries, MessageEntriesTypedDict from .messageinputcontentchunks import ( @@ -569,10 +679,8 @@ MessageInputEntry, MessageInputEntryContent, MessageInputEntryContentTypedDict, - MessageInputEntryObject, - MessageInputEntryRole, - MessageInputEntryType, MessageInputEntryTypedDict, + Role, ) from .messageoutputcontentchunks import ( MessageOutputContentChunks, @@ -582,43 +690,39 @@ MessageOutputEntry, MessageOutputEntryContent, MessageOutputEntryContentTypedDict, - MessageOutputEntryObject, - MessageOutputEntryRole, - 
MessageOutputEntryType, MessageOutputEntryTypedDict, ) from .messageoutputevent import ( MessageOutputEvent, MessageOutputEventContent, MessageOutputEventContentTypedDict, - MessageOutputEventRole, MessageOutputEventTypedDict, ) - from .metricout import MetricOut, MetricOutTypedDict + from .metric import Metric, MetricTypedDict from .mistralpromptmode import MistralPromptMode from .modelcapabilities import ModelCapabilities, ModelCapabilitiesTypedDict from .modelconversation import ( ModelConversation, - ModelConversationObject, ModelConversationTool, ModelConversationToolTypedDict, ModelConversationTypedDict, + UnknownModelConversationTool, ) from .modellist import ( ModelList, ModelListData, ModelListDataTypedDict, ModelListTypedDict, + UnknownModelListData, ) from .moderationobject import ModerationObject, ModerationObjectTypedDict from .moderationresponse import ModerationResponse, ModerationResponseTypedDict - from .no_response_error import NoResponseError from .ocrimageobject import OCRImageObject, OCRImageObjectTypedDict from .ocrpagedimensions import OCRPageDimensions, OCRPageDimensionsTypedDict from .ocrpageobject import OCRPageObject, OCRPageObjectTypedDict from .ocrrequest import ( - Document, - DocumentTypedDict, + DocumentUnion, + DocumentUnionTypedDict, OCRRequest, OCRRequestTypedDict, TableFormat, @@ -640,6 +744,18 @@ RealtimeTranscriptionErrorDetailMessageTypedDict, RealtimeTranscriptionErrorDetailTypedDict, ) + from .realtimetranscriptioninputaudioappend import ( + RealtimeTranscriptionInputAudioAppend, + RealtimeTranscriptionInputAudioAppendTypedDict, + ) + from .realtimetranscriptioninputaudioend import ( + RealtimeTranscriptionInputAudioEnd, + RealtimeTranscriptionInputAudioEndTypedDict, + ) + from .realtimetranscriptioninputaudioflush import ( + RealtimeTranscriptionInputAudioFlush, + RealtimeTranscriptionInputAudioFlushTypedDict, + ) from .realtimetranscriptionsession import ( RealtimeTranscriptionSession, 
RealtimeTranscriptionSessionTypedDict, @@ -652,15 +768,15 @@ RealtimeTranscriptionSessionUpdated, RealtimeTranscriptionSessionUpdatedTypedDict, ) - from .referencechunk import ( - ReferenceChunk, - ReferenceChunkType, - ReferenceChunkTypedDict, + from .realtimetranscriptionsessionupdatemessage import ( + RealtimeTranscriptionSessionUpdateMessage, + RealtimeTranscriptionSessionUpdateMessageTypedDict, ) - from .reprocessdocumentop import ( - ReprocessDocumentRequest, - ReprocessDocumentRequestTypedDict, + from .realtimetranscriptionsessionupdatepayload import ( + RealtimeTranscriptionSessionUpdatePayload, + RealtimeTranscriptionSessionUpdatePayloadTypedDict, ) + from .referencechunk import ReferenceChunk, ReferenceChunkTypedDict from .requestsource import RequestSource from .responsedoneevent import ResponseDoneEvent, ResponseDoneEventTypedDict from .responseerrorevent import ResponseErrorEvent, ResponseErrorEventTypedDict @@ -670,25 +786,14 @@ ResponseStartedEvent, ResponseStartedEventTypedDict, ) - from .responsevalidationerror import ResponseValidationError - from .restartconversationop import ( - RestartConversationRequest, - RestartConversationRequestTypedDict, - ) - from .restartconversationstreamop import ( - RestartConversationStreamRequest, - RestartConversationStreamRequestTypedDict, - ) - from .retrievefileop import RetrieveFileRequest, RetrieveFileRequestTypedDict - from .retrievefileout import RetrieveFileOut, RetrieveFileOutTypedDict - from .retrievemodelop import ( + from .retrieve_model_v1_models_model_id_getop import ( ResponseRetrieveModelV1ModelsModelIDGet, ResponseRetrieveModelV1ModelsModelIDGetTypedDict, - RetrieveModelRequest, - RetrieveModelRequestTypedDict, + RetrieveModelV1ModelsModelIDGetRequest, + RetrieveModelV1ModelsModelIDGetRequestTypedDict, + UnknownResponseRetrieveModelV1ModelsModelIDGet, ) from .sampletype import SampleType - from .sdkerror import SDKError from .security import Security, SecurityTypedDict from .shareenum import 
ShareEnum from .sharingdelete import SharingDelete, SharingDeleteTypedDict @@ -696,12 +801,6 @@ from .sharingout import SharingOut, SharingOutTypedDict from .source import Source from .ssetypes import SSETypes - from .startfinetuningjobop import ( - StartFineTuningJobRequest, - StartFineTuningJobRequestTypedDict, - StartFineTuningJobResponse, - StartFineTuningJobResponseTypedDict, - ) from .systemmessage import ( SystemMessage, SystemMessageContent, @@ -712,19 +811,24 @@ SystemMessageContentChunks, SystemMessageContentChunksTypedDict, ) - from .textchunk import TextChunk, TextChunkType, TextChunkTypedDict + from .textchunk import TextChunk, TextChunkTypedDict from .thinkchunk import ( ThinkChunk, - ThinkChunkType, + ThinkChunkThinking, + ThinkChunkThinkingTypedDict, ThinkChunkTypedDict, - Thinking, - ThinkingTypedDict, ) from .timestampgranularity import TimestampGranularity from .tool import Tool, ToolTypedDict from .toolcall import ToolCall, ToolCallTypedDict + from .toolcallconfirmation import ( + Confirmation, + ToolCallConfirmation, + ToolCallConfirmationTypedDict, + ) from .toolchoice import ToolChoice, ToolChoiceTypedDict from .toolchoiceenum import ToolChoiceEnum + from .toolconfiguration import ToolConfiguration, ToolConfigurationTypedDict from .toolexecutiondeltaevent import ( ToolExecutionDeltaEvent, ToolExecutionDeltaEventName, @@ -741,8 +845,6 @@ ToolExecutionEntry, ToolExecutionEntryName, ToolExecutionEntryNameTypedDict, - ToolExecutionEntryObject, - ToolExecutionEntryType, ToolExecutionEntryTypedDict, ) from .toolexecutionstartedevent import ( @@ -755,7 +857,6 @@ ToolFileChunk, ToolFileChunkTool, ToolFileChunkToolTypedDict, - ToolFileChunkType, ToolFileChunkTypedDict, ) from .toolmessage import ( @@ -768,7 +869,6 @@ ToolReferenceChunk, ToolReferenceChunkTool, ToolReferenceChunkToolTypedDict, - ToolReferenceChunkType, ToolReferenceChunkTypedDict, ) from .tooltypes import ToolTypes @@ -779,7 +879,6 @@ ) from .transcriptionsegmentchunk import ( 
TranscriptionSegmentChunk, - TranscriptionSegmentChunkType, TranscriptionSegmentChunkTypedDict, ) from .transcriptionstreamdone import ( @@ -791,6 +890,7 @@ TranscriptionStreamEventsData, TranscriptionStreamEventsDataTypedDict, TranscriptionStreamEventsTypedDict, + UnknownTranscriptionStreamEventsData, ) from .transcriptionstreameventtypes import TranscriptionStreamEventTypes from .transcriptionstreamlanguage import ( @@ -805,34 +905,27 @@ TranscriptionStreamTextDelta, TranscriptionStreamTextDeltaTypedDict, ) - from .unarchiveftmodelout import UnarchiveFTModelOut, UnarchiveFTModelOutTypedDict - from .unarchivemodelop import UnarchiveModelRequest, UnarchiveModelRequestTypedDict - from .updateagentop import UpdateAgentRequest, UpdateAgentRequestTypedDict - from .updateagentversionop import ( - UpdateAgentVersionRequest, - UpdateAgentVersionRequestTypedDict, - ) - from .updatedocumentop import UpdateDocumentRequest, UpdateDocumentRequestTypedDict - from .updateftmodelin import UpdateFTModelIn, UpdateFTModelInTypedDict - from .updatelibraryop import UpdateLibraryRequest, UpdateLibraryRequestTypedDict - from .updatemodelop import ( - UpdateModelRequest, - UpdateModelRequestTypedDict, - UpdateModelResponse, - UpdateModelResponseTypedDict, - ) - from .updateorcreatelibraryaccessop import ( - UpdateOrCreateLibraryAccessRequest, - UpdateOrCreateLibraryAccessRequestTypedDict, - ) - from .uploaddocumentop import ( - DocumentUpload, - DocumentUploadTypedDict, - UploadDocumentRequest, - UploadDocumentRequestTypedDict, + from .unarchivemodelresponse import ( + UnarchiveModelResponse, + UnarchiveModelResponseTypedDict, + ) + from .updateagentrequest import ( + UpdateAgentRequest, + UpdateAgentRequestTool, + UpdateAgentRequestToolTypedDict, + UpdateAgentRequestTypedDict, + ) + from .updatedocumentrequest import ( + Attributes, + AttributesTypedDict, + UpdateDocumentRequest, + UpdateDocumentRequestTypedDict, ) - from .uploadfileop import MultiPartBodyParams, 
MultiPartBodyParamsTypedDict - from .uploadfileout import UploadFileOut, UploadFileOutTypedDict + from .updatelibraryrequest import ( + UpdateLibraryRequest, + UpdateLibraryRequestTypedDict, + ) + from .updatemodelrequest import UpdateModelRequest, UpdateModelRequestTypedDict from .usageinfo import UsageInfo, UsageInfoTypedDict from .usermessage import ( UserMessage, @@ -847,7 +940,10 @@ ValidationErrorTypedDict, ) from .wandbintegration import WandbIntegration, WandbIntegrationTypedDict - from .wandbintegrationout import WandbIntegrationOut, WandbIntegrationOutTypedDict + from .wandbintegrationresult import ( + WandbIntegrationResult, + WandbIntegrationResultTypedDict, + ) from .websearchpremiumtool import ( WebSearchPremiumTool, WebSearchPremiumToolTypedDict, @@ -862,28 +958,58 @@ "AgentConversation", "AgentConversationAgentVersion", "AgentConversationAgentVersionTypedDict", - "AgentConversationObject", "AgentConversationTypedDict", - "AgentCreationRequest", - "AgentCreationRequestTool", - "AgentCreationRequestToolTypedDict", - "AgentCreationRequestTypedDict", "AgentHandoffDoneEvent", "AgentHandoffDoneEventTypedDict", "AgentHandoffEntry", - "AgentHandoffEntryObject", - "AgentHandoffEntryType", "AgentHandoffEntryTypedDict", "AgentHandoffStartedEvent", "AgentHandoffStartedEventTypedDict", - "AgentObject", "AgentTool", "AgentToolTypedDict", "AgentTypedDict", - "AgentUpdateRequest", - "AgentUpdateRequestTool", - "AgentUpdateRequestToolTypedDict", - "AgentUpdateRequestTypedDict", + "AgentsAPIV1AgentsCreateOrUpdateAliasRequest", + "AgentsAPIV1AgentsCreateOrUpdateAliasRequestTypedDict", + "AgentsAPIV1AgentsDeleteAliasRequest", + "AgentsAPIV1AgentsDeleteAliasRequestTypedDict", + "AgentsAPIV1AgentsDeleteRequest", + "AgentsAPIV1AgentsDeleteRequestTypedDict", + "AgentsAPIV1AgentsGetAgentVersion", + "AgentsAPIV1AgentsGetAgentVersionTypedDict", + "AgentsAPIV1AgentsGetRequest", + "AgentsAPIV1AgentsGetRequestTypedDict", + "AgentsAPIV1AgentsGetVersionRequest", + 
"AgentsAPIV1AgentsGetVersionRequestTypedDict", + "AgentsAPIV1AgentsListRequest", + "AgentsAPIV1AgentsListRequestTypedDict", + "AgentsAPIV1AgentsListVersionAliasesRequest", + "AgentsAPIV1AgentsListVersionAliasesRequestTypedDict", + "AgentsAPIV1AgentsListVersionsRequest", + "AgentsAPIV1AgentsListVersionsRequestTypedDict", + "AgentsAPIV1AgentsUpdateRequest", + "AgentsAPIV1AgentsUpdateRequestTypedDict", + "AgentsAPIV1AgentsUpdateVersionRequest", + "AgentsAPIV1AgentsUpdateVersionRequestTypedDict", + "AgentsAPIV1ConversationsAppendRequest", + "AgentsAPIV1ConversationsAppendRequestTypedDict", + "AgentsAPIV1ConversationsAppendStreamRequest", + "AgentsAPIV1ConversationsAppendStreamRequestTypedDict", + "AgentsAPIV1ConversationsDeleteRequest", + "AgentsAPIV1ConversationsDeleteRequestTypedDict", + "AgentsAPIV1ConversationsGetRequest", + "AgentsAPIV1ConversationsGetRequestTypedDict", + "AgentsAPIV1ConversationsHistoryRequest", + "AgentsAPIV1ConversationsHistoryRequestTypedDict", + "AgentsAPIV1ConversationsListRequest", + "AgentsAPIV1ConversationsListRequestTypedDict", + "AgentsAPIV1ConversationsListResponse", + "AgentsAPIV1ConversationsListResponseTypedDict", + "AgentsAPIV1ConversationsMessagesRequest", + "AgentsAPIV1ConversationsMessagesRequestTypedDict", + "AgentsAPIV1ConversationsRestartRequest", + "AgentsAPIV1ConversationsRestartRequestTypedDict", + "AgentsAPIV1ConversationsRestartStreamRequest", + "AgentsAPIV1ConversationsRestartStreamRequestTypedDict", "AgentsCompletionRequest", "AgentsCompletionRequestMessage", "AgentsCompletionRequestMessageTypedDict", @@ -900,20 +1026,13 @@ "AgentsCompletionStreamRequestToolChoice", "AgentsCompletionStreamRequestToolChoiceTypedDict", "AgentsCompletionStreamRequestTypedDict", - "AppendConversationRequest", - "AppendConversationRequestTypedDict", - "AppendConversationStreamRequest", - "AppendConversationStreamRequestTypedDict", - "ArchiveFTModelOut", - "ArchiveFTModelOutTypedDict", - "ArchiveModelRequest", - 
"ArchiveModelRequestTypedDict", + "ArchiveModelResponse", + "ArchiveModelResponseTypedDict", "Arguments", "ArgumentsTypedDict", "AssistantMessage", "AssistantMessageContent", "AssistantMessageContentTypedDict", - "AssistantMessageRole", "AssistantMessageTypedDict", "Attributes", "AttributesTypedDict", @@ -930,22 +1049,12 @@ "BaseModelCardTypedDict", "BatchError", "BatchErrorTypedDict", - "BatchJobIn", - "BatchJobInTypedDict", - "BatchJobOut", - "BatchJobOutTypedDict", + "BatchJob", "BatchJobStatus", - "BatchJobsOut", - "BatchJobsOutTypedDict", + "BatchJobTypedDict", "BatchRequest", "BatchRequestTypedDict", "BuiltInConnectors", - "CancelBatchJobRequest", - "CancelBatchJobRequestTypedDict", - "CancelFineTuningJobRequest", - "CancelFineTuningJobRequestTypedDict", - "CancelFineTuningJobResponse", - "CancelFineTuningJobResponseTypedDict", "ChatClassificationRequest", "ChatClassificationRequestTypedDict", "ChatCompletionChoice", @@ -977,8 +1086,8 @@ "ChatModerationRequestInputs3", "ChatModerationRequestInputs3TypedDict", "ChatModerationRequestTypedDict", - "CheckpointOut", - "CheckpointOutTypedDict", + "Checkpoint", + "CheckpointTypedDict", "ClassificationRequest", "ClassificationRequestInputs", "ClassificationRequestInputsTypedDict", @@ -987,25 +1096,23 @@ "ClassificationResponseTypedDict", "ClassificationTargetResult", "ClassificationTargetResultTypedDict", - "ClassifierDetailedJobOut", - "ClassifierDetailedJobOutIntegration", - "ClassifierDetailedJobOutIntegrationTypedDict", - "ClassifierDetailedJobOutStatus", - "ClassifierDetailedJobOutTypedDict", - "ClassifierFTModelOut", - "ClassifierFTModelOutTypedDict", - "ClassifierJobOut", - "ClassifierJobOutIntegration", - "ClassifierJobOutIntegrationTypedDict", - "ClassifierJobOutStatus", - "ClassifierJobOutTypedDict", - "ClassifierTargetIn", - "ClassifierTargetInTypedDict", - "ClassifierTargetOut", - "ClassifierTargetOutTypedDict", + "ClassifierFineTunedModel", + "ClassifierFineTunedModelTypedDict", + 
"ClassifierFineTuningJob", + "ClassifierFineTuningJobDetails", + "ClassifierFineTuningJobDetailsIntegration", + "ClassifierFineTuningJobDetailsIntegrationTypedDict", + "ClassifierFineTuningJobDetailsStatus", + "ClassifierFineTuningJobDetailsTypedDict", + "ClassifierFineTuningJobIntegration", + "ClassifierFineTuningJobIntegrationTypedDict", + "ClassifierFineTuningJobStatus", + "ClassifierFineTuningJobTypedDict", + "ClassifierTarget", + "ClassifierTargetResult", + "ClassifierTargetResultTypedDict", + "ClassifierTargetTypedDict", "ClassifierTrainingParameters", - "ClassifierTrainingParametersIn", - "ClassifierTrainingParametersInTypedDict", "ClassifierTrainingParametersTypedDict", "CodeInterpreterTool", "CodeInterpreterToolTypedDict", @@ -1015,31 +1122,30 @@ "CompletionArgsTypedDict", "CompletionChunk", "CompletionChunkTypedDict", - "CompletionDetailedJobOut", - "CompletionDetailedJobOutIntegration", - "CompletionDetailedJobOutIntegrationTypedDict", - "CompletionDetailedJobOutRepository", - "CompletionDetailedJobOutRepositoryTypedDict", - "CompletionDetailedJobOutStatus", - "CompletionDetailedJobOutTypedDict", "CompletionEvent", "CompletionEventTypedDict", - "CompletionFTModelOut", - "CompletionFTModelOutTypedDict", - "CompletionJobOut", - "CompletionJobOutIntegration", - "CompletionJobOutIntegrationTypedDict", - "CompletionJobOutRepository", - "CompletionJobOutRepositoryTypedDict", - "CompletionJobOutStatus", - "CompletionJobOutTypedDict", + "CompletionFineTunedModel", + "CompletionFineTunedModelTypedDict", + "CompletionFineTuningJob", + "CompletionFineTuningJobDetails", + "CompletionFineTuningJobDetailsIntegration", + "CompletionFineTuningJobDetailsIntegrationTypedDict", + "CompletionFineTuningJobDetailsRepository", + "CompletionFineTuningJobDetailsRepositoryTypedDict", + "CompletionFineTuningJobDetailsStatus", + "CompletionFineTuningJobDetailsTypedDict", + "CompletionFineTuningJobIntegration", + "CompletionFineTuningJobIntegrationTypedDict", + 
"CompletionFineTuningJobRepository", + "CompletionFineTuningJobRepositoryTypedDict", + "CompletionFineTuningJobStatus", + "CompletionFineTuningJobTypedDict", "CompletionResponseStreamChoice", "CompletionResponseStreamChoiceFinishReason", "CompletionResponseStreamChoiceTypedDict", "CompletionTrainingParameters", - "CompletionTrainingParametersIn", - "CompletionTrainingParametersInTypedDict", "CompletionTrainingParametersTypedDict", + "Confirmation", "ContentChunk", "ContentChunkTypedDict", "ConversationAppendRequest", @@ -1053,12 +1159,10 @@ "ConversationEventsDataTypedDict", "ConversationEventsTypedDict", "ConversationHistory", - "ConversationHistoryObject", "ConversationHistoryTypedDict", "ConversationInputs", "ConversationInputsTypedDict", "ConversationMessages", - "ConversationMessagesObject", "ConversationMessagesTypedDict", "ConversationRequest", "ConversationRequestAgentVersion", @@ -1068,7 +1172,6 @@ "ConversationRequestToolTypedDict", "ConversationRequestTypedDict", "ConversationResponse", - "ConversationResponseObject", "ConversationResponseTypedDict", "ConversationRestartRequest", "ConversationRestartRequestAgentVersion", @@ -1087,32 +1190,34 @@ "ConversationStreamRequestTool", "ConversationStreamRequestToolTypedDict", "ConversationStreamRequestTypedDict", + "ConversationThinkChunk", + "ConversationThinkChunkThinking", + "ConversationThinkChunkThinkingTypedDict", + "ConversationThinkChunkTypedDict", "ConversationUsageInfo", "ConversationUsageInfoTypedDict", - "CreateFineTuningJobResponse", - "CreateFineTuningJobResponseTypedDict", - "CreateOrUpdateAgentAliasRequest", - "CreateOrUpdateAgentAliasRequestTypedDict", - "DeleteAgentAliasRequest", - "DeleteAgentAliasRequestTypedDict", - "DeleteAgentRequest", - "DeleteAgentRequestTypedDict", - "DeleteConversationRequest", - "DeleteConversationRequestTypedDict", - "DeleteDocumentRequest", - "DeleteDocumentRequestTypedDict", - "DeleteFileOut", - "DeleteFileOutTypedDict", - "DeleteFileRequest", - 
"DeleteFileRequestTypedDict", - "DeleteLibraryAccessRequest", - "DeleteLibraryAccessRequestTypedDict", - "DeleteLibraryRequest", - "DeleteLibraryRequestTypedDict", + "CreateAgentRequest", + "CreateAgentRequestTool", + "CreateAgentRequestToolTypedDict", + "CreateAgentRequestTypedDict", + "CreateBatchJobRequest", + "CreateBatchJobRequestTypedDict", + "CreateFileResponse", + "CreateFileResponseTypedDict", + "CreateFineTuningJobRequest", + "CreateFineTuningJobRequestIntegration", + "CreateFineTuningJobRequestIntegrationTypedDict", + "CreateFineTuningJobRequestRepository", + "CreateFineTuningJobRequestRepositoryTypedDict", + "CreateFineTuningJobRequestTypedDict", + "CreateLibraryRequest", + "CreateLibraryRequestTypedDict", + "DeleteFileResponse", + "DeleteFileResponseTypedDict", "DeleteModelOut", "DeleteModelOutTypedDict", - "DeleteModelRequest", - "DeleteModelRequestTypedDict", + "DeleteModelV1ModelsModelIDDeleteRequest", + "DeleteModelV1ModelsModelIDDeleteRequestTypedDict", "DeltaMessage", "DeltaMessageContent", "DeltaMessageContentTypedDict", @@ -1120,20 +1225,15 @@ "Document", "DocumentLibraryTool", "DocumentLibraryToolTypedDict", - "DocumentOut", - "DocumentOutTypedDict", "DocumentTextContent", "DocumentTextContentTypedDict", "DocumentTypedDict", "DocumentURLChunk", - "DocumentURLChunkType", "DocumentURLChunkTypedDict", - "DocumentUpdateIn", - "DocumentUpdateInTypedDict", + "DocumentUnion", + "DocumentUnionTypedDict", "DocumentUpload", "DocumentUploadTypedDict", - "DownloadFileRequest", - "DownloadFileRequestTypedDict", "EmbeddingDtype", "EmbeddingRequest", "EmbeddingRequestInputs", @@ -1147,8 +1247,8 @@ "EntityType", "Entry", "EntryTypedDict", - "EventOut", - "EventOutTypedDict", + "Event", + "EventTypedDict", "FIMCompletionRequest", "FIMCompletionRequestStop", "FIMCompletionRequestStopTypedDict", @@ -1160,8 +1260,6 @@ "FIMCompletionStreamRequestStopTypedDict", "FIMCompletionStreamRequestTypedDict", "FTClassifierLossFunction", - "FTModelCapabilitiesOut", - 
"FTModelCapabilitiesOutTypedDict", "FTModelCard", "FTModelCardTypedDict", "File", @@ -1170,76 +1268,54 @@ "FilePurpose", "FileSchema", "FileSchemaTypedDict", - "FileSignedURL", - "FileSignedURLTypedDict", "FileTypedDict", + "FilesAPIRoutesDeleteFileRequest", + "FilesAPIRoutesDeleteFileRequestTypedDict", + "FilesAPIRoutesDownloadFileRequest", + "FilesAPIRoutesDownloadFileRequestTypedDict", + "FilesAPIRoutesGetSignedURLRequest", + "FilesAPIRoutesGetSignedURLRequestTypedDict", + "FilesAPIRoutesListFilesRequest", + "FilesAPIRoutesListFilesRequestTypedDict", + "FilesAPIRoutesRetrieveFileRequest", + "FilesAPIRoutesRetrieveFileRequestTypedDict", "FineTuneableModelType", + "FineTunedModelCapabilities", + "FineTunedModelCapabilitiesTypedDict", "Format", "Function", "FunctionCall", "FunctionCallEntry", "FunctionCallEntryArguments", "FunctionCallEntryArgumentsTypedDict", - "FunctionCallEntryObject", - "FunctionCallEntryType", + "FunctionCallEntryConfirmationStatus", "FunctionCallEntryTypedDict", "FunctionCallEvent", + "FunctionCallEventConfirmationStatus", "FunctionCallEventTypedDict", "FunctionCallTypedDict", "FunctionName", "FunctionNameTypedDict", "FunctionResultEntry", - "FunctionResultEntryObject", - "FunctionResultEntryType", "FunctionResultEntryTypedDict", "FunctionTool", "FunctionToolTypedDict", "FunctionTypedDict", - "GetAgentAgentVersion", - "GetAgentAgentVersionTypedDict", - "GetAgentRequest", - "GetAgentRequestTypedDict", - "GetAgentVersionRequest", - "GetAgentVersionRequestTypedDict", - "GetBatchJobRequest", - "GetBatchJobRequestTypedDict", - "GetConversationHistoryRequest", - "GetConversationHistoryRequestTypedDict", - "GetConversationMessagesRequest", - "GetConversationMessagesRequestTypedDict", - "GetConversationRequest", - "GetConversationRequestTypedDict", - "GetDocumentExtractedTextSignedURLRequest", - "GetDocumentExtractedTextSignedURLRequestTypedDict", - "GetDocumentRequest", - "GetDocumentRequestTypedDict", - "GetDocumentSignedURLRequest", - 
"GetDocumentSignedURLRequestTypedDict", - "GetDocumentStatusRequest", - "GetDocumentStatusRequestTypedDict", - "GetDocumentTextContentRequest", - "GetDocumentTextContentRequestTypedDict", - "GetFileSignedURLRequest", - "GetFileSignedURLRequestTypedDict", - "GetFineTuningJobRequest", - "GetFineTuningJobRequestTypedDict", - "GetFineTuningJobResponse", - "GetFineTuningJobResponseTypedDict", - "GetLibraryRequest", - "GetLibraryRequestTypedDict", + "GetFileResponse", + "GetFileResponseTypedDict", + "GetSignedURLResponse", + "GetSignedURLResponseTypedDict", + "GithubRepository", "GithubRepositoryIn", "GithubRepositoryInTypedDict", - "GithubRepositoryOut", - "GithubRepositoryOutTypedDict", - "HTTPValidationError", - "HTTPValidationErrorData", + "GithubRepositoryTypedDict", "Hyperparameters", "HyperparametersTypedDict", + "ImageDetail", "ImageGenerationTool", "ImageGenerationToolTypedDict", "ImageURL", "ImageURLChunk", - "ImageURLChunkType", "ImageURLChunkTypedDict", "ImageURLTypedDict", "ImageURLUnion", @@ -1247,64 +1323,94 @@ "InputEntries", "InputEntriesTypedDict", "Inputs", - "InputsMessage", - "InputsMessageTypedDict", "InputsTypedDict", "InstructRequest", - "InstructRequestInputs", - "InstructRequestInputsTypedDict", "InstructRequestMessage", "InstructRequestMessageTypedDict", "InstructRequestTypedDict", "JSONSchema", "JSONSchemaTypedDict", - "JobIn", - "JobInIntegration", - "JobInIntegrationTypedDict", - "JobInRepository", - "JobInRepositoryTypedDict", - "JobInTypedDict", - "JobMetadataOut", - "JobMetadataOutTypedDict", - "JobsOut", - "JobsOutData", - "JobsOutDataTypedDict", - "JobsOutTypedDict", - "LegacyJobMetadataOut", - "LegacyJobMetadataOutTypedDict", - "LibraryIn", - "LibraryInTypedDict", - "LibraryInUpdate", - "LibraryInUpdateTypedDict", - "LibraryOut", - "LibraryOutTypedDict", - "ListAgentAliasesRequest", - "ListAgentAliasesRequestTypedDict", - "ListAgentVersionsRequest", - "ListAgentVersionsRequestTypedDict", - "ListAgentsRequest", - 
"ListAgentsRequestTypedDict", - "ListBatchJobsRequest", - "ListBatchJobsRequestTypedDict", - "ListConversationsRequest", - "ListConversationsRequestTypedDict", - "ListConversationsResponse", - "ListConversationsResponseTypedDict", - "ListDocumentOut", - "ListDocumentOutTypedDict", - "ListDocumentsRequest", - "ListDocumentsRequestTypedDict", - "ListFilesOut", - "ListFilesOutTypedDict", - "ListFilesRequest", - "ListFilesRequestTypedDict", - "ListFineTuningJobsRequest", - "ListFineTuningJobsRequestTypedDict", - "ListFineTuningJobsStatus", - "ListLibraryAccessesRequest", - "ListLibraryAccessesRequestTypedDict", - "ListLibraryOut", - "ListLibraryOutTypedDict", + "JobMetadata", + "JobMetadataTypedDict", + "JobsAPIRoutesBatchCancelBatchJobRequest", + "JobsAPIRoutesBatchCancelBatchJobRequestTypedDict", + "JobsAPIRoutesBatchGetBatchJobRequest", + "JobsAPIRoutesBatchGetBatchJobRequestTypedDict", + "JobsAPIRoutesBatchGetBatchJobsRequest", + "JobsAPIRoutesBatchGetBatchJobsRequestTypedDict", + "JobsAPIRoutesFineTuningArchiveFineTunedModelRequest", + "JobsAPIRoutesFineTuningArchiveFineTunedModelRequestTypedDict", + "JobsAPIRoutesFineTuningCancelFineTuningJobRequest", + "JobsAPIRoutesFineTuningCancelFineTuningJobRequestTypedDict", + "JobsAPIRoutesFineTuningCancelFineTuningJobResponse", + "JobsAPIRoutesFineTuningCancelFineTuningJobResponseTypedDict", + "JobsAPIRoutesFineTuningCreateFineTuningJobResponse", + "JobsAPIRoutesFineTuningCreateFineTuningJobResponseTypedDict", + "JobsAPIRoutesFineTuningGetFineTuningJobRequest", + "JobsAPIRoutesFineTuningGetFineTuningJobRequestTypedDict", + "JobsAPIRoutesFineTuningGetFineTuningJobResponse", + "JobsAPIRoutesFineTuningGetFineTuningJobResponseTypedDict", + "JobsAPIRoutesFineTuningGetFineTuningJobsRequest", + "JobsAPIRoutesFineTuningGetFineTuningJobsRequestTypedDict", + "JobsAPIRoutesFineTuningGetFineTuningJobsStatus", + "JobsAPIRoutesFineTuningStartFineTuningJobRequest", + "JobsAPIRoutesFineTuningStartFineTuningJobRequestTypedDict", + 
"JobsAPIRoutesFineTuningStartFineTuningJobResponse", + "JobsAPIRoutesFineTuningStartFineTuningJobResponseTypedDict", + "JobsAPIRoutesFineTuningUnarchiveFineTunedModelRequest", + "JobsAPIRoutesFineTuningUnarchiveFineTunedModelRequestTypedDict", + "JobsAPIRoutesFineTuningUpdateFineTunedModelRequest", + "JobsAPIRoutesFineTuningUpdateFineTunedModelRequestTypedDict", + "JobsAPIRoutesFineTuningUpdateFineTunedModelResponse", + "JobsAPIRoutesFineTuningUpdateFineTunedModelResponseTypedDict", + "LegacyJobMetadata", + "LegacyJobMetadataTypedDict", + "LibrariesDeleteV1Request", + "LibrariesDeleteV1RequestTypedDict", + "LibrariesDocumentsDeleteV1Request", + "LibrariesDocumentsDeleteV1RequestTypedDict", + "LibrariesDocumentsGetExtractedTextSignedURLV1Request", + "LibrariesDocumentsGetExtractedTextSignedURLV1RequestTypedDict", + "LibrariesDocumentsGetSignedURLV1Request", + "LibrariesDocumentsGetSignedURLV1RequestTypedDict", + "LibrariesDocumentsGetStatusV1Request", + "LibrariesDocumentsGetStatusV1RequestTypedDict", + "LibrariesDocumentsGetTextContentV1Request", + "LibrariesDocumentsGetTextContentV1RequestTypedDict", + "LibrariesDocumentsGetV1Request", + "LibrariesDocumentsGetV1RequestTypedDict", + "LibrariesDocumentsListV1Request", + "LibrariesDocumentsListV1RequestTypedDict", + "LibrariesDocumentsReprocessV1Request", + "LibrariesDocumentsReprocessV1RequestTypedDict", + "LibrariesDocumentsUpdateV1Request", + "LibrariesDocumentsUpdateV1RequestTypedDict", + "LibrariesDocumentsUploadV1Request", + "LibrariesDocumentsUploadV1RequestTypedDict", + "LibrariesGetV1Request", + "LibrariesGetV1RequestTypedDict", + "LibrariesShareCreateV1Request", + "LibrariesShareCreateV1RequestTypedDict", + "LibrariesShareDeleteV1Request", + "LibrariesShareDeleteV1RequestTypedDict", + "LibrariesShareListV1Request", + "LibrariesShareListV1RequestTypedDict", + "LibrariesUpdateV1Request", + "LibrariesUpdateV1RequestTypedDict", + "Library", + "LibraryTypedDict", + "ListBatchJobsResponse", + 
"ListBatchJobsResponseTypedDict", + "ListDocumentsResponse", + "ListDocumentsResponseTypedDict", + "ListFilesResponse", + "ListFilesResponseTypedDict", + "ListFineTuningJobsResponse", + "ListFineTuningJobsResponseData", + "ListFineTuningJobsResponseDataTypedDict", + "ListFineTuningJobsResponseTypedDict", + "ListLibrariesResponse", + "ListLibrariesResponseTypedDict", "ListSharingOut", "ListSharingOutTypedDict", "Loc", @@ -1316,32 +1422,23 @@ "MessageInputEntry", "MessageInputEntryContent", "MessageInputEntryContentTypedDict", - "MessageInputEntryObject", - "MessageInputEntryRole", - "MessageInputEntryType", "MessageInputEntryTypedDict", "MessageOutputContentChunks", "MessageOutputContentChunksTypedDict", "MessageOutputEntry", "MessageOutputEntryContent", "MessageOutputEntryContentTypedDict", - "MessageOutputEntryObject", - "MessageOutputEntryRole", - "MessageOutputEntryType", "MessageOutputEntryTypedDict", "MessageOutputEvent", "MessageOutputEventContent", "MessageOutputEventContentTypedDict", - "MessageOutputEventRole", "MessageOutputEventTypedDict", - "MetricOut", - "MetricOutTypedDict", - "MistralError", + "Metric", + "MetricTypedDict", "MistralPromptMode", "ModelCapabilities", "ModelCapabilitiesTypedDict", "ModelConversation", - "ModelConversationObject", "ModelConversationTool", "ModelConversationToolTypedDict", "ModelConversationTypedDict", @@ -1355,7 +1452,6 @@ "ModerationResponseTypedDict", "MultiPartBodyParams", "MultiPartBodyParamsTypedDict", - "NoResponseError", "OCRImageObject", "OCRImageObjectTypedDict", "OCRPageDimensions", @@ -1387,17 +1483,24 @@ "RealtimeTranscriptionErrorDetailMessageTypedDict", "RealtimeTranscriptionErrorDetailTypedDict", "RealtimeTranscriptionErrorTypedDict", + "RealtimeTranscriptionInputAudioAppend", + "RealtimeTranscriptionInputAudioAppendTypedDict", + "RealtimeTranscriptionInputAudioEnd", + "RealtimeTranscriptionInputAudioEndTypedDict", + "RealtimeTranscriptionInputAudioFlush", + "RealtimeTranscriptionInputAudioFlushTypedDict", 
"RealtimeTranscriptionSession", "RealtimeTranscriptionSessionCreated", "RealtimeTranscriptionSessionCreatedTypedDict", "RealtimeTranscriptionSessionTypedDict", + "RealtimeTranscriptionSessionUpdateMessage", + "RealtimeTranscriptionSessionUpdateMessageTypedDict", + "RealtimeTranscriptionSessionUpdatePayload", + "RealtimeTranscriptionSessionUpdatePayloadTypedDict", "RealtimeTranscriptionSessionUpdated", "RealtimeTranscriptionSessionUpdatedTypedDict", "ReferenceChunk", - "ReferenceChunkType", "ReferenceChunkTypedDict", - "ReprocessDocumentRequest", - "ReprocessDocumentRequestTypedDict", "RequestSource", "Response", "ResponseDoneEvent", @@ -1414,18 +1517,9 @@ "ResponseTypedDict", "ResponseV1ConversationsGet", "ResponseV1ConversationsGetTypedDict", - "ResponseValidationError", - "RestartConversationRequest", - "RestartConversationRequestTypedDict", - "RestartConversationStreamRequest", - "RestartConversationStreamRequestTypedDict", - "RetrieveFileOut", - "RetrieveFileOutTypedDict", - "RetrieveFileRequest", - "RetrieveFileRequestTypedDict", - "RetrieveModelRequest", - "RetrieveModelRequestTypedDict", - "SDKError", + "RetrieveModelV1ModelsModelIDGetRequest", + "RetrieveModelV1ModelsModelIDGetRequestTypedDict", + "Role", "SSETypes", "SampleType", "Security", @@ -1438,10 +1532,6 @@ "SharingOut", "SharingOutTypedDict", "Source", - "StartFineTuningJobRequest", - "StartFineTuningJobRequestTypedDict", - "StartFineTuningJobResponse", - "StartFineTuningJobResponseTypedDict", "SystemMessage", "SystemMessageContent", "SystemMessageContentChunks", @@ -1450,20 +1540,22 @@ "SystemMessageTypedDict", "TableFormat", "TextChunk", - "TextChunkType", "TextChunkTypedDict", "ThinkChunk", - "ThinkChunkType", + "ThinkChunkThinking", + "ThinkChunkThinkingTypedDict", "ThinkChunkTypedDict", - "Thinking", - "ThinkingTypedDict", "TimestampGranularity", "Tool", "ToolCall", + "ToolCallConfirmation", + "ToolCallConfirmationTypedDict", "ToolCallTypedDict", "ToolChoice", "ToolChoiceEnum", 
"ToolChoiceTypedDict", + "ToolConfiguration", + "ToolConfigurationTypedDict", "ToolExecutionDeltaEvent", "ToolExecutionDeltaEventName", "ToolExecutionDeltaEventNameTypedDict", @@ -1475,8 +1567,6 @@ "ToolExecutionEntry", "ToolExecutionEntryName", "ToolExecutionEntryNameTypedDict", - "ToolExecutionEntryObject", - "ToolExecutionEntryType", "ToolExecutionEntryTypedDict", "ToolExecutionStartedEvent", "ToolExecutionStartedEventName", @@ -1485,7 +1575,6 @@ "ToolFileChunk", "ToolFileChunkTool", "ToolFileChunkToolTypedDict", - "ToolFileChunkType", "ToolFileChunkTypedDict", "ToolMessage", "ToolMessageContent", @@ -1494,7 +1583,6 @@ "ToolReferenceChunk", "ToolReferenceChunkTool", "ToolReferenceChunkToolTypedDict", - "ToolReferenceChunkType", "ToolReferenceChunkTypedDict", "ToolTypedDict", "ToolTypes", @@ -1503,7 +1591,6 @@ "TranscriptionResponse", "TranscriptionResponseTypedDict", "TranscriptionSegmentChunk", - "TranscriptionSegmentChunkType", "TranscriptionSegmentChunkTypedDict", "TranscriptionStreamDone", "TranscriptionStreamDoneTypedDict", @@ -1518,30 +1605,37 @@ "TranscriptionStreamSegmentDeltaTypedDict", "TranscriptionStreamTextDelta", "TranscriptionStreamTextDeltaTypedDict", - "UnarchiveFTModelOut", - "UnarchiveFTModelOutTypedDict", - "UnarchiveModelRequest", - "UnarchiveModelRequestTypedDict", + "UnarchiveModelResponse", + "UnarchiveModelResponseTypedDict", + "UnknownAgentTool", + "UnknownClassifierFineTuningJobDetailsIntegration", + "UnknownClassifierFineTuningJobIntegration", + "UnknownCompletionFineTuningJobDetailsIntegration", + "UnknownCompletionFineTuningJobDetailsRepository", + "UnknownCompletionFineTuningJobIntegration", + "UnknownCompletionFineTuningJobRepository", + "UnknownContentChunk", + "UnknownConversationEventsData", + "UnknownJobsAPIRoutesFineTuningCancelFineTuningJobResponse", + "UnknownJobsAPIRoutesFineTuningGetFineTuningJobResponse", + "UnknownJobsAPIRoutesFineTuningStartFineTuningJobResponse", + 
"UnknownJobsAPIRoutesFineTuningUpdateFineTunedModelResponse", + "UnknownListFineTuningJobsResponseData", + "UnknownModelConversationTool", + "UnknownModelListData", + "UnknownResponse", + "UnknownResponseRetrieveModelV1ModelsModelIDGet", + "UnknownTranscriptionStreamEventsData", "UpdateAgentRequest", + "UpdateAgentRequestTool", + "UpdateAgentRequestToolTypedDict", "UpdateAgentRequestTypedDict", - "UpdateAgentVersionRequest", - "UpdateAgentVersionRequestTypedDict", "UpdateDocumentRequest", "UpdateDocumentRequestTypedDict", - "UpdateFTModelIn", - "UpdateFTModelInTypedDict", "UpdateLibraryRequest", "UpdateLibraryRequestTypedDict", "UpdateModelRequest", "UpdateModelRequestTypedDict", - "UpdateModelResponse", - "UpdateModelResponseTypedDict", - "UpdateOrCreateLibraryAccessRequest", - "UpdateOrCreateLibraryAccessRequestTypedDict", - "UploadDocumentRequest", - "UploadDocumentRequestTypedDict", - "UploadFileOut", - "UploadFileOutTypedDict", "UsageInfo", "UsageInfoTypedDict", "UserMessage", @@ -1551,8 +1645,8 @@ "ValidationError", "ValidationErrorTypedDict", "WandbIntegration", - "WandbIntegrationOut", - "WandbIntegrationOutTypedDict", + "WandbIntegrationResult", + "WandbIntegrationResultTypedDict", "WandbIntegrationTypedDict", "WebSearchPremiumTool", "WebSearchPremiumToolTypedDict", @@ -1562,29 +1656,66 @@ _dynamic_imports: dict[str, str] = { "Agent": ".agent", - "AgentObject": ".agent", "AgentTool": ".agent", "AgentToolTypedDict": ".agent", "AgentTypedDict": ".agent", + "UnknownAgentTool": ".agent", "AgentAliasResponse": ".agentaliasresponse", "AgentAliasResponseTypedDict": ".agentaliasresponse", "AgentConversation": ".agentconversation", "AgentConversationAgentVersion": ".agentconversation", "AgentConversationAgentVersionTypedDict": ".agentconversation", - "AgentConversationObject": ".agentconversation", "AgentConversationTypedDict": ".agentconversation", - "AgentCreationRequest": ".agentcreationrequest", - "AgentCreationRequestTool": ".agentcreationrequest", - 
"AgentCreationRequestToolTypedDict": ".agentcreationrequest", - "AgentCreationRequestTypedDict": ".agentcreationrequest", "AgentHandoffDoneEvent": ".agenthandoffdoneevent", "AgentHandoffDoneEventTypedDict": ".agenthandoffdoneevent", "AgentHandoffEntry": ".agenthandoffentry", - "AgentHandoffEntryObject": ".agenthandoffentry", - "AgentHandoffEntryType": ".agenthandoffentry", "AgentHandoffEntryTypedDict": ".agenthandoffentry", "AgentHandoffStartedEvent": ".agenthandoffstartedevent", "AgentHandoffStartedEventTypedDict": ".agenthandoffstartedevent", + "AgentsAPIV1AgentsCreateOrUpdateAliasRequest": ".agents_api_v1_agents_create_or_update_aliasop", + "AgentsAPIV1AgentsCreateOrUpdateAliasRequestTypedDict": ".agents_api_v1_agents_create_or_update_aliasop", + "AgentsAPIV1AgentsDeleteAliasRequest": ".agents_api_v1_agents_delete_aliasop", + "AgentsAPIV1AgentsDeleteAliasRequestTypedDict": ".agents_api_v1_agents_delete_aliasop", + "AgentsAPIV1AgentsDeleteRequest": ".agents_api_v1_agents_deleteop", + "AgentsAPIV1AgentsDeleteRequestTypedDict": ".agents_api_v1_agents_deleteop", + "AgentsAPIV1AgentsGetVersionRequest": ".agents_api_v1_agents_get_versionop", + "AgentsAPIV1AgentsGetVersionRequestTypedDict": ".agents_api_v1_agents_get_versionop", + "AgentsAPIV1AgentsGetAgentVersion": ".agents_api_v1_agents_getop", + "AgentsAPIV1AgentsGetAgentVersionTypedDict": ".agents_api_v1_agents_getop", + "AgentsAPIV1AgentsGetRequest": ".agents_api_v1_agents_getop", + "AgentsAPIV1AgentsGetRequestTypedDict": ".agents_api_v1_agents_getop", + "AgentsAPIV1AgentsListVersionAliasesRequest": ".agents_api_v1_agents_list_version_aliasesop", + "AgentsAPIV1AgentsListVersionAliasesRequestTypedDict": ".agents_api_v1_agents_list_version_aliasesop", + "AgentsAPIV1AgentsListVersionsRequest": ".agents_api_v1_agents_list_versionsop", + "AgentsAPIV1AgentsListVersionsRequestTypedDict": ".agents_api_v1_agents_list_versionsop", + "AgentsAPIV1AgentsListRequest": ".agents_api_v1_agents_listop", + 
"AgentsAPIV1AgentsListRequestTypedDict": ".agents_api_v1_agents_listop", + "AgentsAPIV1AgentsUpdateVersionRequest": ".agents_api_v1_agents_update_versionop", + "AgentsAPIV1AgentsUpdateVersionRequestTypedDict": ".agents_api_v1_agents_update_versionop", + "AgentsAPIV1AgentsUpdateRequest": ".agents_api_v1_agents_updateop", + "AgentsAPIV1AgentsUpdateRequestTypedDict": ".agents_api_v1_agents_updateop", + "AgentsAPIV1ConversationsAppendStreamRequest": ".agents_api_v1_conversations_append_streamop", + "AgentsAPIV1ConversationsAppendStreamRequestTypedDict": ".agents_api_v1_conversations_append_streamop", + "AgentsAPIV1ConversationsAppendRequest": ".agents_api_v1_conversations_appendop", + "AgentsAPIV1ConversationsAppendRequestTypedDict": ".agents_api_v1_conversations_appendop", + "AgentsAPIV1ConversationsDeleteRequest": ".agents_api_v1_conversations_deleteop", + "AgentsAPIV1ConversationsDeleteRequestTypedDict": ".agents_api_v1_conversations_deleteop", + "AgentsAPIV1ConversationsGetRequest": ".agents_api_v1_conversations_getop", + "AgentsAPIV1ConversationsGetRequestTypedDict": ".agents_api_v1_conversations_getop", + "ResponseV1ConversationsGet": ".agents_api_v1_conversations_getop", + "ResponseV1ConversationsGetTypedDict": ".agents_api_v1_conversations_getop", + "AgentsAPIV1ConversationsHistoryRequest": ".agents_api_v1_conversations_historyop", + "AgentsAPIV1ConversationsHistoryRequestTypedDict": ".agents_api_v1_conversations_historyop", + "AgentsAPIV1ConversationsListRequest": ".agents_api_v1_conversations_listop", + "AgentsAPIV1ConversationsListRequestTypedDict": ".agents_api_v1_conversations_listop", + "AgentsAPIV1ConversationsListResponse": ".agents_api_v1_conversations_listop", + "AgentsAPIV1ConversationsListResponseTypedDict": ".agents_api_v1_conversations_listop", + "AgentsAPIV1ConversationsMessagesRequest": ".agents_api_v1_conversations_messagesop", + "AgentsAPIV1ConversationsMessagesRequestTypedDict": ".agents_api_v1_conversations_messagesop", + 
"AgentsAPIV1ConversationsRestartStreamRequest": ".agents_api_v1_conversations_restart_streamop", + "AgentsAPIV1ConversationsRestartStreamRequestTypedDict": ".agents_api_v1_conversations_restart_streamop", + "AgentsAPIV1ConversationsRestartRequest": ".agents_api_v1_conversations_restartop", + "AgentsAPIV1ConversationsRestartRequestTypedDict": ".agents_api_v1_conversations_restartop", "AgentsCompletionRequest": ".agentscompletionrequest", "AgentsCompletionRequestMessage": ".agentscompletionrequest", "AgentsCompletionRequestMessageTypedDict": ".agentscompletionrequest", @@ -1601,23 +1732,12 @@ "AgentsCompletionStreamRequestToolChoice": ".agentscompletionstreamrequest", "AgentsCompletionStreamRequestToolChoiceTypedDict": ".agentscompletionstreamrequest", "AgentsCompletionStreamRequestTypedDict": ".agentscompletionstreamrequest", - "AgentUpdateRequest": ".agentupdaterequest", - "AgentUpdateRequestTool": ".agentupdaterequest", - "AgentUpdateRequestToolTypedDict": ".agentupdaterequest", - "AgentUpdateRequestTypedDict": ".agentupdaterequest", "APIEndpoint": ".apiendpoint", - "AppendConversationRequest": ".appendconversationop", - "AppendConversationRequestTypedDict": ".appendconversationop", - "AppendConversationStreamRequest": ".appendconversationstreamop", - "AppendConversationStreamRequestTypedDict": ".appendconversationstreamop", - "ArchiveFTModelOut": ".archiveftmodelout", - "ArchiveFTModelOutTypedDict": ".archiveftmodelout", - "ArchiveModelRequest": ".archivemodelop", - "ArchiveModelRequestTypedDict": ".archivemodelop", + "ArchiveModelResponse": ".archivemodelresponse", + "ArchiveModelResponseTypedDict": ".archivemodelresponse", "AssistantMessage": ".assistantmessage", "AssistantMessageContent": ".assistantmessage", "AssistantMessageContentTypedDict": ".assistantmessage", - "AssistantMessageRole": ".assistantmessage", "AssistantMessageTypedDict": ".assistantmessage", "AudioChunk": ".audiochunk", "AudioChunkTypedDict": ".audiochunk", @@ -1632,22 +1752,12 @@ 
"BaseModelCardTypedDict": ".basemodelcard", "BatchError": ".batcherror", "BatchErrorTypedDict": ".batcherror", - "BatchJobIn": ".batchjobin", - "BatchJobInTypedDict": ".batchjobin", - "BatchJobOut": ".batchjobout", - "BatchJobOutTypedDict": ".batchjobout", - "BatchJobsOut": ".batchjobsout", - "BatchJobsOutTypedDict": ".batchjobsout", + "BatchJob": ".batchjob", + "BatchJobTypedDict": ".batchjob", "BatchJobStatus": ".batchjobstatus", "BatchRequest": ".batchrequest", "BatchRequestTypedDict": ".batchrequest", "BuiltInConnectors": ".builtinconnectors", - "CancelBatchJobRequest": ".cancelbatchjobop", - "CancelBatchJobRequestTypedDict": ".cancelbatchjobop", - "CancelFineTuningJobRequest": ".cancelfinetuningjobop", - "CancelFineTuningJobRequestTypedDict": ".cancelfinetuningjobop", - "CancelFineTuningJobResponse": ".cancelfinetuningjobop", - "CancelFineTuningJobResponseTypedDict": ".cancelfinetuningjobop", "ChatClassificationRequest": ".chatclassificationrequest", "ChatClassificationRequestTypedDict": ".chatclassificationrequest", "ChatCompletionChoice": ".chatcompletionchoice", @@ -1679,8 +1789,8 @@ "ChatModerationRequestInputs3": ".chatmoderationrequest", "ChatModerationRequestInputs3TypedDict": ".chatmoderationrequest", "ChatModerationRequestTypedDict": ".chatmoderationrequest", - "CheckpointOut": ".checkpointout", - "CheckpointOutTypedDict": ".checkpointout", + "Checkpoint": ".checkpoint", + "CheckpointTypedDict": ".checkpoint", "ClassificationRequest": ".classificationrequest", "ClassificationRequestInputs": ".classificationrequest", "ClassificationRequestInputsTypedDict": ".classificationrequest", @@ -1689,26 +1799,26 @@ "ClassificationResponseTypedDict": ".classificationresponse", "ClassificationTargetResult": ".classificationtargetresult", "ClassificationTargetResultTypedDict": ".classificationtargetresult", - "ClassifierDetailedJobOut": ".classifierdetailedjobout", - "ClassifierDetailedJobOutIntegration": ".classifierdetailedjobout", - 
"ClassifierDetailedJobOutIntegrationTypedDict": ".classifierdetailedjobout", - "ClassifierDetailedJobOutStatus": ".classifierdetailedjobout", - "ClassifierDetailedJobOutTypedDict": ".classifierdetailedjobout", - "ClassifierFTModelOut": ".classifierftmodelout", - "ClassifierFTModelOutTypedDict": ".classifierftmodelout", - "ClassifierJobOut": ".classifierjobout", - "ClassifierJobOutIntegration": ".classifierjobout", - "ClassifierJobOutIntegrationTypedDict": ".classifierjobout", - "ClassifierJobOutStatus": ".classifierjobout", - "ClassifierJobOutTypedDict": ".classifierjobout", - "ClassifierTargetIn": ".classifiertargetin", - "ClassifierTargetInTypedDict": ".classifiertargetin", - "ClassifierTargetOut": ".classifiertargetout", - "ClassifierTargetOutTypedDict": ".classifiertargetout", + "ClassifierFineTunedModel": ".classifierfinetunedmodel", + "ClassifierFineTunedModelTypedDict": ".classifierfinetunedmodel", + "ClassifierFineTuningJob": ".classifierfinetuningjob", + "ClassifierFineTuningJobIntegration": ".classifierfinetuningjob", + "ClassifierFineTuningJobIntegrationTypedDict": ".classifierfinetuningjob", + "ClassifierFineTuningJobStatus": ".classifierfinetuningjob", + "ClassifierFineTuningJobTypedDict": ".classifierfinetuningjob", + "UnknownClassifierFineTuningJobIntegration": ".classifierfinetuningjob", + "ClassifierFineTuningJobDetails": ".classifierfinetuningjobdetails", + "ClassifierFineTuningJobDetailsIntegration": ".classifierfinetuningjobdetails", + "ClassifierFineTuningJobDetailsIntegrationTypedDict": ".classifierfinetuningjobdetails", + "ClassifierFineTuningJobDetailsStatus": ".classifierfinetuningjobdetails", + "ClassifierFineTuningJobDetailsTypedDict": ".classifierfinetuningjobdetails", + "UnknownClassifierFineTuningJobDetailsIntegration": ".classifierfinetuningjobdetails", + "ClassifierTarget": ".classifiertarget", + "ClassifierTargetTypedDict": ".classifiertarget", + "ClassifierTargetResult": ".classifiertargetresult", + 
"ClassifierTargetResultTypedDict": ".classifiertargetresult", "ClassifierTrainingParameters": ".classifiertrainingparameters", "ClassifierTrainingParametersTypedDict": ".classifiertrainingparameters", - "ClassifierTrainingParametersIn": ".classifiertrainingparametersin", - "ClassifierTrainingParametersInTypedDict": ".classifiertrainingparametersin", "CodeInterpreterTool": ".codeinterpretertool", "CodeInterpreterToolTypedDict": ".codeinterpretertool", "CompletionArgs": ".completionargs", @@ -1717,33 +1827,36 @@ "CompletionArgsStopTypedDict": ".completionargsstop", "CompletionChunk": ".completionchunk", "CompletionChunkTypedDict": ".completionchunk", - "CompletionDetailedJobOut": ".completiondetailedjobout", - "CompletionDetailedJobOutIntegration": ".completiondetailedjobout", - "CompletionDetailedJobOutIntegrationTypedDict": ".completiondetailedjobout", - "CompletionDetailedJobOutRepository": ".completiondetailedjobout", - "CompletionDetailedJobOutRepositoryTypedDict": ".completiondetailedjobout", - "CompletionDetailedJobOutStatus": ".completiondetailedjobout", - "CompletionDetailedJobOutTypedDict": ".completiondetailedjobout", "CompletionEvent": ".completionevent", "CompletionEventTypedDict": ".completionevent", - "CompletionFTModelOut": ".completionftmodelout", - "CompletionFTModelOutTypedDict": ".completionftmodelout", - "CompletionJobOut": ".completionjobout", - "CompletionJobOutIntegration": ".completionjobout", - "CompletionJobOutIntegrationTypedDict": ".completionjobout", - "CompletionJobOutRepository": ".completionjobout", - "CompletionJobOutRepositoryTypedDict": ".completionjobout", - "CompletionJobOutStatus": ".completionjobout", - "CompletionJobOutTypedDict": ".completionjobout", + "CompletionFineTunedModel": ".completionfinetunedmodel", + "CompletionFineTunedModelTypedDict": ".completionfinetunedmodel", + "CompletionFineTuningJob": ".completionfinetuningjob", + "CompletionFineTuningJobIntegration": ".completionfinetuningjob", + 
"CompletionFineTuningJobIntegrationTypedDict": ".completionfinetuningjob", + "CompletionFineTuningJobRepository": ".completionfinetuningjob", + "CompletionFineTuningJobRepositoryTypedDict": ".completionfinetuningjob", + "CompletionFineTuningJobStatus": ".completionfinetuningjob", + "CompletionFineTuningJobTypedDict": ".completionfinetuningjob", + "UnknownCompletionFineTuningJobIntegration": ".completionfinetuningjob", + "UnknownCompletionFineTuningJobRepository": ".completionfinetuningjob", + "CompletionFineTuningJobDetails": ".completionfinetuningjobdetails", + "CompletionFineTuningJobDetailsIntegration": ".completionfinetuningjobdetails", + "CompletionFineTuningJobDetailsIntegrationTypedDict": ".completionfinetuningjobdetails", + "CompletionFineTuningJobDetailsRepository": ".completionfinetuningjobdetails", + "CompletionFineTuningJobDetailsRepositoryTypedDict": ".completionfinetuningjobdetails", + "CompletionFineTuningJobDetailsStatus": ".completionfinetuningjobdetails", + "CompletionFineTuningJobDetailsTypedDict": ".completionfinetuningjobdetails", + "UnknownCompletionFineTuningJobDetailsIntegration": ".completionfinetuningjobdetails", + "UnknownCompletionFineTuningJobDetailsRepository": ".completionfinetuningjobdetails", "CompletionResponseStreamChoice": ".completionresponsestreamchoice", "CompletionResponseStreamChoiceFinishReason": ".completionresponsestreamchoice", "CompletionResponseStreamChoiceTypedDict": ".completionresponsestreamchoice", "CompletionTrainingParameters": ".completiontrainingparameters", "CompletionTrainingParametersTypedDict": ".completiontrainingparameters", - "CompletionTrainingParametersIn": ".completiontrainingparametersin", - "CompletionTrainingParametersInTypedDict": ".completiontrainingparametersin", "ContentChunk": ".contentchunk", "ContentChunkTypedDict": ".contentchunk", + "UnknownContentChunk": ".contentchunk", "ConversationAppendRequest": ".conversationappendrequest", "ConversationAppendRequestHandoffExecution": 
".conversationappendrequest", "ConversationAppendRequestTypedDict": ".conversationappendrequest", @@ -1754,15 +1867,14 @@ "ConversationEventsData": ".conversationevents", "ConversationEventsDataTypedDict": ".conversationevents", "ConversationEventsTypedDict": ".conversationevents", + "UnknownConversationEventsData": ".conversationevents", "ConversationHistory": ".conversationhistory", - "ConversationHistoryObject": ".conversationhistory", "ConversationHistoryTypedDict": ".conversationhistory", "Entry": ".conversationhistory", "EntryTypedDict": ".conversationhistory", "ConversationInputs": ".conversationinputs", "ConversationInputsTypedDict": ".conversationinputs", "ConversationMessages": ".conversationmessages", - "ConversationMessagesObject": ".conversationmessages", "ConversationMessagesTypedDict": ".conversationmessages", "ConversationRequest": ".conversationrequest", "ConversationRequestAgentVersion": ".conversationrequest", @@ -1772,7 +1884,6 @@ "ConversationRequestToolTypedDict": ".conversationrequest", "ConversationRequestTypedDict": ".conversationrequest", "ConversationResponse": ".conversationresponse", - "ConversationResponseObject": ".conversationresponse", "ConversationResponseTypedDict": ".conversationresponse", "Output": ".conversationresponse", "OutputTypedDict": ".conversationresponse", @@ -1793,53 +1904,48 @@ "ConversationStreamRequestTool": ".conversationstreamrequest", "ConversationStreamRequestToolTypedDict": ".conversationstreamrequest", "ConversationStreamRequestTypedDict": ".conversationstreamrequest", + "ConversationThinkChunk": ".conversationthinkchunk", + "ConversationThinkChunkThinking": ".conversationthinkchunk", + "ConversationThinkChunkThinkingTypedDict": ".conversationthinkchunk", + "ConversationThinkChunkTypedDict": ".conversationthinkchunk", "ConversationUsageInfo": ".conversationusageinfo", "ConversationUsageInfoTypedDict": ".conversationusageinfo", - "CreateFineTuningJobResponse": ".createfinetuningjobop", - 
"CreateFineTuningJobResponseTypedDict": ".createfinetuningjobop", - "Response": ".createfinetuningjobop", - "ResponseTypedDict": ".createfinetuningjobop", - "CreateOrUpdateAgentAliasRequest": ".createorupdateagentaliasop", - "CreateOrUpdateAgentAliasRequestTypedDict": ".createorupdateagentaliasop", - "DeleteAgentAliasRequest": ".deleteagentaliasop", - "DeleteAgentAliasRequestTypedDict": ".deleteagentaliasop", - "DeleteAgentRequest": ".deleteagentop", - "DeleteAgentRequestTypedDict": ".deleteagentop", - "DeleteConversationRequest": ".deleteconversationop", - "DeleteConversationRequestTypedDict": ".deleteconversationop", - "DeleteDocumentRequest": ".deletedocumentop", - "DeleteDocumentRequestTypedDict": ".deletedocumentop", - "DeleteFileRequest": ".deletefileop", - "DeleteFileRequestTypedDict": ".deletefileop", - "DeleteFileOut": ".deletefileout", - "DeleteFileOutTypedDict": ".deletefileout", - "DeleteLibraryAccessRequest": ".deletelibraryaccessop", - "DeleteLibraryAccessRequestTypedDict": ".deletelibraryaccessop", - "DeleteLibraryRequest": ".deletelibraryop", - "DeleteLibraryRequestTypedDict": ".deletelibraryop", - "DeleteModelRequest": ".deletemodelop", - "DeleteModelRequestTypedDict": ".deletemodelop", + "CreateAgentRequest": ".createagentrequest", + "CreateAgentRequestTool": ".createagentrequest", + "CreateAgentRequestToolTypedDict": ".createagentrequest", + "CreateAgentRequestTypedDict": ".createagentrequest", + "CreateBatchJobRequest": ".createbatchjobrequest", + "CreateBatchJobRequestTypedDict": ".createbatchjobrequest", + "CreateFileResponse": ".createfileresponse", + "CreateFileResponseTypedDict": ".createfileresponse", + "CreateFineTuningJobRequest": ".createfinetuningjobrequest", + "CreateFineTuningJobRequestIntegration": ".createfinetuningjobrequest", + "CreateFineTuningJobRequestIntegrationTypedDict": ".createfinetuningjobrequest", + "CreateFineTuningJobRequestRepository": ".createfinetuningjobrequest", + "CreateFineTuningJobRequestRepositoryTypedDict": 
".createfinetuningjobrequest", + "CreateFineTuningJobRequestTypedDict": ".createfinetuningjobrequest", + "Hyperparameters": ".createfinetuningjobrequest", + "HyperparametersTypedDict": ".createfinetuningjobrequest", + "CreateLibraryRequest": ".createlibraryrequest", + "CreateLibraryRequestTypedDict": ".createlibraryrequest", + "DeleteModelV1ModelsModelIDDeleteRequest": ".delete_model_v1_models_model_id_deleteop", + "DeleteModelV1ModelsModelIDDeleteRequestTypedDict": ".delete_model_v1_models_model_id_deleteop", + "DeleteFileResponse": ".deletefileresponse", + "DeleteFileResponseTypedDict": ".deletefileresponse", "DeleteModelOut": ".deletemodelout", "DeleteModelOutTypedDict": ".deletemodelout", "DeltaMessage": ".deltamessage", "DeltaMessageContent": ".deltamessage", "DeltaMessageContentTypedDict": ".deltamessage", "DeltaMessageTypedDict": ".deltamessage", + "Document": ".document", + "DocumentTypedDict": ".document", "DocumentLibraryTool": ".documentlibrarytool", "DocumentLibraryToolTypedDict": ".documentlibrarytool", - "DocumentOut": ".documentout", - "DocumentOutTypedDict": ".documentout", "DocumentTextContent": ".documenttextcontent", "DocumentTextContentTypedDict": ".documenttextcontent", - "Attributes": ".documentupdatein", - "AttributesTypedDict": ".documentupdatein", - "DocumentUpdateIn": ".documentupdatein", - "DocumentUpdateInTypedDict": ".documentupdatein", "DocumentURLChunk": ".documenturlchunk", - "DocumentURLChunkType": ".documenturlchunk", "DocumentURLChunkTypedDict": ".documenturlchunk", - "DownloadFileRequest": ".downloadfileop", - "DownloadFileRequestTypedDict": ".downloadfileop", "EmbeddingDtype": ".embeddingdtype", "EmbeddingRequest": ".embeddingrequest", "EmbeddingRequestInputs": ".embeddingrequest", @@ -1851,17 +1957,27 @@ "EmbeddingResponseDataTypedDict": ".embeddingresponsedata", "EncodingFormat": ".encodingformat", "EntityType": ".entitytype", - "EventOut": ".eventout", - "EventOutTypedDict": ".eventout", + "Event": ".event", + 
"EventTypedDict": ".event", "File": ".file", "FileTypedDict": ".file", "FileChunk": ".filechunk", "FileChunkTypedDict": ".filechunk", "FilePurpose": ".filepurpose", + "FilesAPIRoutesDeleteFileRequest": ".files_api_routes_delete_fileop", + "FilesAPIRoutesDeleteFileRequestTypedDict": ".files_api_routes_delete_fileop", + "FilesAPIRoutesDownloadFileRequest": ".files_api_routes_download_fileop", + "FilesAPIRoutesDownloadFileRequestTypedDict": ".files_api_routes_download_fileop", + "FilesAPIRoutesGetSignedURLRequest": ".files_api_routes_get_signed_urlop", + "FilesAPIRoutesGetSignedURLRequestTypedDict": ".files_api_routes_get_signed_urlop", + "FilesAPIRoutesListFilesRequest": ".files_api_routes_list_filesop", + "FilesAPIRoutesListFilesRequestTypedDict": ".files_api_routes_list_filesop", + "FilesAPIRoutesRetrieveFileRequest": ".files_api_routes_retrieve_fileop", + "FilesAPIRoutesRetrieveFileRequestTypedDict": ".files_api_routes_retrieve_fileop", + "MultiPartBodyParams": ".files_api_routes_upload_fileop", + "MultiPartBodyParamsTypedDict": ".files_api_routes_upload_fileop", "FileSchema": ".fileschema", "FileSchemaTypedDict": ".fileschema", - "FileSignedURL": ".filesignedurl", - "FileSignedURLTypedDict": ".filesignedurl", "FIMCompletionRequest": ".fimcompletionrequest", "FIMCompletionRequestStop": ".fimcompletionrequest", "FIMCompletionRequestStopTypedDict": ".fimcompletionrequest", @@ -1873,9 +1989,9 @@ "FIMCompletionStreamRequestStopTypedDict": ".fimcompletionstreamrequest", "FIMCompletionStreamRequestTypedDict": ".fimcompletionstreamrequest", "FineTuneableModelType": ".finetuneablemodeltype", + "FineTunedModelCapabilities": ".finetunedmodelcapabilities", + "FineTunedModelCapabilitiesTypedDict": ".finetunedmodelcapabilities", "FTClassifierLossFunction": ".ftclassifierlossfunction", - "FTModelCapabilitiesOut": ".ftmodelcapabilitiesout", - "FTModelCapabilitiesOutTypedDict": ".ftmodelcapabilitiesout", "FTModelCard": ".ftmodelcard", "FTModelCardTypedDict": ".ftmodelcard", 
"Function": ".function", @@ -1885,134 +2001,138 @@ "FunctionCall": ".functioncall", "FunctionCallTypedDict": ".functioncall", "FunctionCallEntry": ".functioncallentry", - "FunctionCallEntryObject": ".functioncallentry", - "FunctionCallEntryType": ".functioncallentry", + "FunctionCallEntryConfirmationStatus": ".functioncallentry", "FunctionCallEntryTypedDict": ".functioncallentry", "FunctionCallEntryArguments": ".functioncallentryarguments", "FunctionCallEntryArgumentsTypedDict": ".functioncallentryarguments", "FunctionCallEvent": ".functioncallevent", + "FunctionCallEventConfirmationStatus": ".functioncallevent", "FunctionCallEventTypedDict": ".functioncallevent", "FunctionName": ".functionname", "FunctionNameTypedDict": ".functionname", "FunctionResultEntry": ".functionresultentry", - "FunctionResultEntryObject": ".functionresultentry", - "FunctionResultEntryType": ".functionresultentry", "FunctionResultEntryTypedDict": ".functionresultentry", "FunctionTool": ".functiontool", "FunctionToolTypedDict": ".functiontool", - "GetAgentAgentVersion": ".getagentop", - "GetAgentAgentVersionTypedDict": ".getagentop", - "GetAgentRequest": ".getagentop", - "GetAgentRequestTypedDict": ".getagentop", - "GetAgentVersionRequest": ".getagentversionop", - "GetAgentVersionRequestTypedDict": ".getagentversionop", - "GetBatchJobRequest": ".getbatchjobop", - "GetBatchJobRequestTypedDict": ".getbatchjobop", - "GetConversationHistoryRequest": ".getconversationhistoryop", - "GetConversationHistoryRequestTypedDict": ".getconversationhistoryop", - "GetConversationMessagesRequest": ".getconversationmessagesop", - "GetConversationMessagesRequestTypedDict": ".getconversationmessagesop", - "GetConversationRequest": ".getconversationop", - "GetConversationRequestTypedDict": ".getconversationop", - "ResponseV1ConversationsGet": ".getconversationop", - "ResponseV1ConversationsGetTypedDict": ".getconversationop", - "GetDocumentExtractedTextSignedURLRequest": ".getdocumentextractedtextsignedurlop", - 
"GetDocumentExtractedTextSignedURLRequestTypedDict": ".getdocumentextractedtextsignedurlop", - "GetDocumentRequest": ".getdocumentop", - "GetDocumentRequestTypedDict": ".getdocumentop", - "GetDocumentSignedURLRequest": ".getdocumentsignedurlop", - "GetDocumentSignedURLRequestTypedDict": ".getdocumentsignedurlop", - "GetDocumentStatusRequest": ".getdocumentstatusop", - "GetDocumentStatusRequestTypedDict": ".getdocumentstatusop", - "GetDocumentTextContentRequest": ".getdocumenttextcontentop", - "GetDocumentTextContentRequestTypedDict": ".getdocumenttextcontentop", - "GetFileSignedURLRequest": ".getfilesignedurlop", - "GetFileSignedURLRequestTypedDict": ".getfilesignedurlop", - "GetFineTuningJobRequest": ".getfinetuningjobop", - "GetFineTuningJobRequestTypedDict": ".getfinetuningjobop", - "GetFineTuningJobResponse": ".getfinetuningjobop", - "GetFineTuningJobResponseTypedDict": ".getfinetuningjobop", - "GetLibraryRequest": ".getlibraryop", - "GetLibraryRequestTypedDict": ".getlibraryop", + "GetFileResponse": ".getfileresponse", + "GetFileResponseTypedDict": ".getfileresponse", + "GetSignedURLResponse": ".getsignedurlresponse", + "GetSignedURLResponseTypedDict": ".getsignedurlresponse", + "GithubRepository": ".githubrepository", + "GithubRepositoryTypedDict": ".githubrepository", "GithubRepositoryIn": ".githubrepositoryin", "GithubRepositoryInTypedDict": ".githubrepositoryin", - "GithubRepositoryOut": ".githubrepositoryout", - "GithubRepositoryOutTypedDict": ".githubrepositoryout", - "HTTPValidationError": ".httpvalidationerror", - "HTTPValidationErrorData": ".httpvalidationerror", + "ImageDetail": ".imagedetail", "ImageGenerationTool": ".imagegenerationtool", "ImageGenerationToolTypedDict": ".imagegenerationtool", "ImageURL": ".imageurl", "ImageURLTypedDict": ".imageurl", "ImageURLChunk": ".imageurlchunk", - "ImageURLChunkType": ".imageurlchunk", "ImageURLChunkTypedDict": ".imageurlchunk", "ImageURLUnion": ".imageurlchunk", "ImageURLUnionTypedDict": ".imageurlchunk", 
"InputEntries": ".inputentries", "InputEntriesTypedDict": ".inputentries", "Inputs": ".inputs", - "InputsMessage": ".inputs", - "InputsMessageTypedDict": ".inputs", "InputsTypedDict": ".inputs", - "InstructRequestInputs": ".inputs", - "InstructRequestInputsTypedDict": ".inputs", "InstructRequest": ".instructrequest", "InstructRequestMessage": ".instructrequest", "InstructRequestMessageTypedDict": ".instructrequest", "InstructRequestTypedDict": ".instructrequest", - "Hyperparameters": ".jobin", - "HyperparametersTypedDict": ".jobin", - "JobIn": ".jobin", - "JobInIntegration": ".jobin", - "JobInIntegrationTypedDict": ".jobin", - "JobInRepository": ".jobin", - "JobInRepositoryTypedDict": ".jobin", - "JobInTypedDict": ".jobin", - "JobMetadataOut": ".jobmetadataout", - "JobMetadataOutTypedDict": ".jobmetadataout", - "JobsOut": ".jobsout", - "JobsOutData": ".jobsout", - "JobsOutDataTypedDict": ".jobsout", - "JobsOutTypedDict": ".jobsout", + "JobMetadata": ".jobmetadata", + "JobMetadataTypedDict": ".jobmetadata", + "JobsAPIRoutesBatchCancelBatchJobRequest": ".jobs_api_routes_batch_cancel_batch_jobop", + "JobsAPIRoutesBatchCancelBatchJobRequestTypedDict": ".jobs_api_routes_batch_cancel_batch_jobop", + "JobsAPIRoutesBatchGetBatchJobRequest": ".jobs_api_routes_batch_get_batch_jobop", + "JobsAPIRoutesBatchGetBatchJobRequestTypedDict": ".jobs_api_routes_batch_get_batch_jobop", + "JobsAPIRoutesBatchGetBatchJobsRequest": ".jobs_api_routes_batch_get_batch_jobsop", + "JobsAPIRoutesBatchGetBatchJobsRequestTypedDict": ".jobs_api_routes_batch_get_batch_jobsop", + "OrderBy": ".jobs_api_routes_batch_get_batch_jobsop", + "JobsAPIRoutesFineTuningArchiveFineTunedModelRequest": ".jobs_api_routes_fine_tuning_archive_fine_tuned_modelop", + "JobsAPIRoutesFineTuningArchiveFineTunedModelRequestTypedDict": ".jobs_api_routes_fine_tuning_archive_fine_tuned_modelop", + "JobsAPIRoutesFineTuningCancelFineTuningJobRequest": ".jobs_api_routes_fine_tuning_cancel_fine_tuning_jobop", + 
"JobsAPIRoutesFineTuningCancelFineTuningJobRequestTypedDict": ".jobs_api_routes_fine_tuning_cancel_fine_tuning_jobop", + "JobsAPIRoutesFineTuningCancelFineTuningJobResponse": ".jobs_api_routes_fine_tuning_cancel_fine_tuning_jobop", + "JobsAPIRoutesFineTuningCancelFineTuningJobResponseTypedDict": ".jobs_api_routes_fine_tuning_cancel_fine_tuning_jobop", + "UnknownJobsAPIRoutesFineTuningCancelFineTuningJobResponse": ".jobs_api_routes_fine_tuning_cancel_fine_tuning_jobop", + "JobsAPIRoutesFineTuningCreateFineTuningJobResponse": ".jobs_api_routes_fine_tuning_create_fine_tuning_jobop", + "JobsAPIRoutesFineTuningCreateFineTuningJobResponseTypedDict": ".jobs_api_routes_fine_tuning_create_fine_tuning_jobop", + "Response": ".jobs_api_routes_fine_tuning_create_fine_tuning_jobop", + "ResponseTypedDict": ".jobs_api_routes_fine_tuning_create_fine_tuning_jobop", + "UnknownResponse": ".jobs_api_routes_fine_tuning_create_fine_tuning_jobop", + "JobsAPIRoutesFineTuningGetFineTuningJobRequest": ".jobs_api_routes_fine_tuning_get_fine_tuning_jobop", + "JobsAPIRoutesFineTuningGetFineTuningJobRequestTypedDict": ".jobs_api_routes_fine_tuning_get_fine_tuning_jobop", + "JobsAPIRoutesFineTuningGetFineTuningJobResponse": ".jobs_api_routes_fine_tuning_get_fine_tuning_jobop", + "JobsAPIRoutesFineTuningGetFineTuningJobResponseTypedDict": ".jobs_api_routes_fine_tuning_get_fine_tuning_jobop", + "UnknownJobsAPIRoutesFineTuningGetFineTuningJobResponse": ".jobs_api_routes_fine_tuning_get_fine_tuning_jobop", + "JobsAPIRoutesFineTuningGetFineTuningJobsRequest": ".jobs_api_routes_fine_tuning_get_fine_tuning_jobsop", + "JobsAPIRoutesFineTuningGetFineTuningJobsRequestTypedDict": ".jobs_api_routes_fine_tuning_get_fine_tuning_jobsop", + "JobsAPIRoutesFineTuningGetFineTuningJobsStatus": ".jobs_api_routes_fine_tuning_get_fine_tuning_jobsop", + "JobsAPIRoutesFineTuningStartFineTuningJobRequest": ".jobs_api_routes_fine_tuning_start_fine_tuning_jobop", + 
"JobsAPIRoutesFineTuningStartFineTuningJobRequestTypedDict": ".jobs_api_routes_fine_tuning_start_fine_tuning_jobop", + "JobsAPIRoutesFineTuningStartFineTuningJobResponse": ".jobs_api_routes_fine_tuning_start_fine_tuning_jobop", + "JobsAPIRoutesFineTuningStartFineTuningJobResponseTypedDict": ".jobs_api_routes_fine_tuning_start_fine_tuning_jobop", + "UnknownJobsAPIRoutesFineTuningStartFineTuningJobResponse": ".jobs_api_routes_fine_tuning_start_fine_tuning_jobop", + "JobsAPIRoutesFineTuningUnarchiveFineTunedModelRequest": ".jobs_api_routes_fine_tuning_unarchive_fine_tuned_modelop", + "JobsAPIRoutesFineTuningUnarchiveFineTunedModelRequestTypedDict": ".jobs_api_routes_fine_tuning_unarchive_fine_tuned_modelop", + "JobsAPIRoutesFineTuningUpdateFineTunedModelRequest": ".jobs_api_routes_fine_tuning_update_fine_tuned_modelop", + "JobsAPIRoutesFineTuningUpdateFineTunedModelRequestTypedDict": ".jobs_api_routes_fine_tuning_update_fine_tuned_modelop", + "JobsAPIRoutesFineTuningUpdateFineTunedModelResponse": ".jobs_api_routes_fine_tuning_update_fine_tuned_modelop", + "JobsAPIRoutesFineTuningUpdateFineTunedModelResponseTypedDict": ".jobs_api_routes_fine_tuning_update_fine_tuned_modelop", + "UnknownJobsAPIRoutesFineTuningUpdateFineTunedModelResponse": ".jobs_api_routes_fine_tuning_update_fine_tuned_modelop", "JSONSchema": ".jsonschema", "JSONSchemaTypedDict": ".jsonschema", - "LegacyJobMetadataOut": ".legacyjobmetadataout", - "LegacyJobMetadataOutTypedDict": ".legacyjobmetadataout", - "LibraryIn": ".libraryin", - "LibraryInTypedDict": ".libraryin", - "LibraryInUpdate": ".libraryinupdate", - "LibraryInUpdateTypedDict": ".libraryinupdate", - "LibraryOut": ".libraryout", - "LibraryOutTypedDict": ".libraryout", - "ListAgentAliasesRequest": ".listagentaliasesop", - "ListAgentAliasesRequestTypedDict": ".listagentaliasesop", - "ListAgentsRequest": ".listagentsop", - "ListAgentsRequestTypedDict": ".listagentsop", - "ListAgentVersionsRequest": ".listagentversionsop", - 
"ListAgentVersionsRequestTypedDict": ".listagentversionsop", - "ListBatchJobsRequest": ".listbatchjobsop", - "ListBatchJobsRequestTypedDict": ".listbatchjobsop", - "OrderBy": ".listbatchjobsop", - "ListConversationsRequest": ".listconversationsop", - "ListConversationsRequestTypedDict": ".listconversationsop", - "ListConversationsResponse": ".listconversationsop", - "ListConversationsResponseTypedDict": ".listconversationsop", - "ListDocumentOut": ".listdocumentout", - "ListDocumentOutTypedDict": ".listdocumentout", - "ListDocumentsRequest": ".listdocumentsop", - "ListDocumentsRequestTypedDict": ".listdocumentsop", - "ListFilesRequest": ".listfilesop", - "ListFilesRequestTypedDict": ".listfilesop", - "ListFilesOut": ".listfilesout", - "ListFilesOutTypedDict": ".listfilesout", - "ListFineTuningJobsRequest": ".listfinetuningjobsop", - "ListFineTuningJobsRequestTypedDict": ".listfinetuningjobsop", - "ListFineTuningJobsStatus": ".listfinetuningjobsop", - "ListLibraryAccessesRequest": ".listlibraryaccessesop", - "ListLibraryAccessesRequestTypedDict": ".listlibraryaccessesop", - "ListLibraryOut": ".listlibraryout", - "ListLibraryOutTypedDict": ".listlibraryout", + "LegacyJobMetadata": ".legacyjobmetadata", + "LegacyJobMetadataTypedDict": ".legacyjobmetadata", + "LibrariesDeleteV1Request": ".libraries_delete_v1op", + "LibrariesDeleteV1RequestTypedDict": ".libraries_delete_v1op", + "LibrariesDocumentsDeleteV1Request": ".libraries_documents_delete_v1op", + "LibrariesDocumentsDeleteV1RequestTypedDict": ".libraries_documents_delete_v1op", + "LibrariesDocumentsGetExtractedTextSignedURLV1Request": ".libraries_documents_get_extracted_text_signed_url_v1op", + "LibrariesDocumentsGetExtractedTextSignedURLV1RequestTypedDict": ".libraries_documents_get_extracted_text_signed_url_v1op", + "LibrariesDocumentsGetSignedURLV1Request": ".libraries_documents_get_signed_url_v1op", + "LibrariesDocumentsGetSignedURLV1RequestTypedDict": ".libraries_documents_get_signed_url_v1op", + 
"LibrariesDocumentsGetStatusV1Request": ".libraries_documents_get_status_v1op", + "LibrariesDocumentsGetStatusV1RequestTypedDict": ".libraries_documents_get_status_v1op", + "LibrariesDocumentsGetTextContentV1Request": ".libraries_documents_get_text_content_v1op", + "LibrariesDocumentsGetTextContentV1RequestTypedDict": ".libraries_documents_get_text_content_v1op", + "LibrariesDocumentsGetV1Request": ".libraries_documents_get_v1op", + "LibrariesDocumentsGetV1RequestTypedDict": ".libraries_documents_get_v1op", + "LibrariesDocumentsListV1Request": ".libraries_documents_list_v1op", + "LibrariesDocumentsListV1RequestTypedDict": ".libraries_documents_list_v1op", + "LibrariesDocumentsReprocessV1Request": ".libraries_documents_reprocess_v1op", + "LibrariesDocumentsReprocessV1RequestTypedDict": ".libraries_documents_reprocess_v1op", + "LibrariesDocumentsUpdateV1Request": ".libraries_documents_update_v1op", + "LibrariesDocumentsUpdateV1RequestTypedDict": ".libraries_documents_update_v1op", + "DocumentUpload": ".libraries_documents_upload_v1op", + "DocumentUploadTypedDict": ".libraries_documents_upload_v1op", + "LibrariesDocumentsUploadV1Request": ".libraries_documents_upload_v1op", + "LibrariesDocumentsUploadV1RequestTypedDict": ".libraries_documents_upload_v1op", + "LibrariesGetV1Request": ".libraries_get_v1op", + "LibrariesGetV1RequestTypedDict": ".libraries_get_v1op", + "LibrariesShareCreateV1Request": ".libraries_share_create_v1op", + "LibrariesShareCreateV1RequestTypedDict": ".libraries_share_create_v1op", + "LibrariesShareDeleteV1Request": ".libraries_share_delete_v1op", + "LibrariesShareDeleteV1RequestTypedDict": ".libraries_share_delete_v1op", + "LibrariesShareListV1Request": ".libraries_share_list_v1op", + "LibrariesShareListV1RequestTypedDict": ".libraries_share_list_v1op", + "LibrariesUpdateV1Request": ".libraries_update_v1op", + "LibrariesUpdateV1RequestTypedDict": ".libraries_update_v1op", + "Library": ".library", + "LibraryTypedDict": ".library", + 
"ListBatchJobsResponse": ".listbatchjobsresponse", + "ListBatchJobsResponseTypedDict": ".listbatchjobsresponse", + "ListDocumentsResponse": ".listdocumentsresponse", + "ListDocumentsResponseTypedDict": ".listdocumentsresponse", + "ListFilesResponse": ".listfilesresponse", + "ListFilesResponseTypedDict": ".listfilesresponse", + "ListFineTuningJobsResponse": ".listfinetuningjobsresponse", + "ListFineTuningJobsResponseData": ".listfinetuningjobsresponse", + "ListFineTuningJobsResponseDataTypedDict": ".listfinetuningjobsresponse", + "ListFineTuningJobsResponseTypedDict": ".listfinetuningjobsresponse", + "UnknownListFineTuningJobsResponseData": ".listfinetuningjobsresponse", + "ListLibrariesResponse": ".listlibrariesresponse", + "ListLibrariesResponseTypedDict": ".listlibrariesresponse", "ListSharingOut": ".listsharingout", "ListSharingOutTypedDict": ".listsharingout", "MessageEntries": ".messageentries", @@ -2022,51 +2142,45 @@ "MessageInputEntry": ".messageinputentry", "MessageInputEntryContent": ".messageinputentry", "MessageInputEntryContentTypedDict": ".messageinputentry", - "MessageInputEntryObject": ".messageinputentry", - "MessageInputEntryRole": ".messageinputentry", - "MessageInputEntryType": ".messageinputentry", "MessageInputEntryTypedDict": ".messageinputentry", + "Role": ".messageinputentry", "MessageOutputContentChunks": ".messageoutputcontentchunks", "MessageOutputContentChunksTypedDict": ".messageoutputcontentchunks", "MessageOutputEntry": ".messageoutputentry", "MessageOutputEntryContent": ".messageoutputentry", "MessageOutputEntryContentTypedDict": ".messageoutputentry", - "MessageOutputEntryObject": ".messageoutputentry", - "MessageOutputEntryRole": ".messageoutputentry", - "MessageOutputEntryType": ".messageoutputentry", "MessageOutputEntryTypedDict": ".messageoutputentry", "MessageOutputEvent": ".messageoutputevent", "MessageOutputEventContent": ".messageoutputevent", "MessageOutputEventContentTypedDict": ".messageoutputevent", - 
"MessageOutputEventRole": ".messageoutputevent", "MessageOutputEventTypedDict": ".messageoutputevent", - "MetricOut": ".metricout", - "MetricOutTypedDict": ".metricout", + "Metric": ".metric", + "MetricTypedDict": ".metric", "MistralPromptMode": ".mistralpromptmode", "ModelCapabilities": ".modelcapabilities", "ModelCapabilitiesTypedDict": ".modelcapabilities", "ModelConversation": ".modelconversation", - "ModelConversationObject": ".modelconversation", "ModelConversationTool": ".modelconversation", "ModelConversationToolTypedDict": ".modelconversation", "ModelConversationTypedDict": ".modelconversation", + "UnknownModelConversationTool": ".modelconversation", "ModelList": ".modellist", "ModelListData": ".modellist", "ModelListDataTypedDict": ".modellist", "ModelListTypedDict": ".modellist", + "UnknownModelListData": ".modellist", "ModerationObject": ".moderationobject", "ModerationObjectTypedDict": ".moderationobject", "ModerationResponse": ".moderationresponse", "ModerationResponseTypedDict": ".moderationresponse", - "NoResponseError": ".no_response_error", "OCRImageObject": ".ocrimageobject", "OCRImageObjectTypedDict": ".ocrimageobject", "OCRPageDimensions": ".ocrpagedimensions", "OCRPageDimensionsTypedDict": ".ocrpagedimensions", "OCRPageObject": ".ocrpageobject", "OCRPageObjectTypedDict": ".ocrpageobject", - "Document": ".ocrrequest", - "DocumentTypedDict": ".ocrrequest", + "DocumentUnion": ".ocrrequest", + "DocumentUnionTypedDict": ".ocrrequest", "OCRRequest": ".ocrrequest", "OCRRequestTypedDict": ".ocrrequest", "TableFormat": ".ocrrequest", @@ -2091,17 +2205,24 @@ "RealtimeTranscriptionErrorDetailMessage": ".realtimetranscriptionerrordetail", "RealtimeTranscriptionErrorDetailMessageTypedDict": ".realtimetranscriptionerrordetail", "RealtimeTranscriptionErrorDetailTypedDict": ".realtimetranscriptionerrordetail", + "RealtimeTranscriptionInputAudioAppend": ".realtimetranscriptioninputaudioappend", + "RealtimeTranscriptionInputAudioAppendTypedDict": 
".realtimetranscriptioninputaudioappend", + "RealtimeTranscriptionInputAudioEnd": ".realtimetranscriptioninputaudioend", + "RealtimeTranscriptionInputAudioEndTypedDict": ".realtimetranscriptioninputaudioend", + "RealtimeTranscriptionInputAudioFlush": ".realtimetranscriptioninputaudioflush", + "RealtimeTranscriptionInputAudioFlushTypedDict": ".realtimetranscriptioninputaudioflush", "RealtimeTranscriptionSession": ".realtimetranscriptionsession", "RealtimeTranscriptionSessionTypedDict": ".realtimetranscriptionsession", "RealtimeTranscriptionSessionCreated": ".realtimetranscriptionsessioncreated", "RealtimeTranscriptionSessionCreatedTypedDict": ".realtimetranscriptionsessioncreated", "RealtimeTranscriptionSessionUpdated": ".realtimetranscriptionsessionupdated", "RealtimeTranscriptionSessionUpdatedTypedDict": ".realtimetranscriptionsessionupdated", + "RealtimeTranscriptionSessionUpdateMessage": ".realtimetranscriptionsessionupdatemessage", + "RealtimeTranscriptionSessionUpdateMessageTypedDict": ".realtimetranscriptionsessionupdatemessage", + "RealtimeTranscriptionSessionUpdatePayload": ".realtimetranscriptionsessionupdatepayload", + "RealtimeTranscriptionSessionUpdatePayloadTypedDict": ".realtimetranscriptionsessionupdatepayload", "ReferenceChunk": ".referencechunk", - "ReferenceChunkType": ".referencechunk", "ReferenceChunkTypedDict": ".referencechunk", - "ReprocessDocumentRequest": ".reprocessdocumentop", - "ReprocessDocumentRequestTypedDict": ".reprocessdocumentop", "RequestSource": ".requestsource", "ResponseDoneEvent": ".responsedoneevent", "ResponseDoneEventTypedDict": ".responsedoneevent", @@ -2112,21 +2233,12 @@ "ResponseFormats": ".responseformats", "ResponseStartedEvent": ".responsestartedevent", "ResponseStartedEventTypedDict": ".responsestartedevent", - "ResponseValidationError": ".responsevalidationerror", - "RestartConversationRequest": ".restartconversationop", - "RestartConversationRequestTypedDict": ".restartconversationop", - 
"RestartConversationStreamRequest": ".restartconversationstreamop", - "RestartConversationStreamRequestTypedDict": ".restartconversationstreamop", - "RetrieveFileRequest": ".retrievefileop", - "RetrieveFileRequestTypedDict": ".retrievefileop", - "RetrieveFileOut": ".retrievefileout", - "RetrieveFileOutTypedDict": ".retrievefileout", - "ResponseRetrieveModelV1ModelsModelIDGet": ".retrievemodelop", - "ResponseRetrieveModelV1ModelsModelIDGetTypedDict": ".retrievemodelop", - "RetrieveModelRequest": ".retrievemodelop", - "RetrieveModelRequestTypedDict": ".retrievemodelop", + "ResponseRetrieveModelV1ModelsModelIDGet": ".retrieve_model_v1_models_model_id_getop", + "ResponseRetrieveModelV1ModelsModelIDGetTypedDict": ".retrieve_model_v1_models_model_id_getop", + "RetrieveModelV1ModelsModelIDGetRequest": ".retrieve_model_v1_models_model_id_getop", + "RetrieveModelV1ModelsModelIDGetRequestTypedDict": ".retrieve_model_v1_models_model_id_getop", + "UnknownResponseRetrieveModelV1ModelsModelIDGet": ".retrieve_model_v1_models_model_id_getop", "SampleType": ".sampletype", - "SDKError": ".sdkerror", "Security": ".security", "SecurityTypedDict": ".security", "ShareEnum": ".shareenum", @@ -2138,10 +2250,6 @@ "SharingOutTypedDict": ".sharingout", "Source": ".source", "SSETypes": ".ssetypes", - "StartFineTuningJobRequest": ".startfinetuningjobop", - "StartFineTuningJobRequestTypedDict": ".startfinetuningjobop", - "StartFineTuningJobResponse": ".startfinetuningjobop", - "StartFineTuningJobResponseTypedDict": ".startfinetuningjobop", "SystemMessage": ".systemmessage", "SystemMessageContent": ".systemmessage", "SystemMessageContentTypedDict": ".systemmessage", @@ -2149,21 +2257,24 @@ "SystemMessageContentChunks": ".systemmessagecontentchunks", "SystemMessageContentChunksTypedDict": ".systemmessagecontentchunks", "TextChunk": ".textchunk", - "TextChunkType": ".textchunk", "TextChunkTypedDict": ".textchunk", "ThinkChunk": ".thinkchunk", - "ThinkChunkType": ".thinkchunk", + 
"ThinkChunkThinking": ".thinkchunk", + "ThinkChunkThinkingTypedDict": ".thinkchunk", "ThinkChunkTypedDict": ".thinkchunk", - "Thinking": ".thinkchunk", - "ThinkingTypedDict": ".thinkchunk", "TimestampGranularity": ".timestampgranularity", "Tool": ".tool", "ToolTypedDict": ".tool", "ToolCall": ".toolcall", "ToolCallTypedDict": ".toolcall", + "Confirmation": ".toolcallconfirmation", + "ToolCallConfirmation": ".toolcallconfirmation", + "ToolCallConfirmationTypedDict": ".toolcallconfirmation", "ToolChoice": ".toolchoice", "ToolChoiceTypedDict": ".toolchoice", "ToolChoiceEnum": ".toolchoiceenum", + "ToolConfiguration": ".toolconfiguration", + "ToolConfigurationTypedDict": ".toolconfiguration", "ToolExecutionDeltaEvent": ".toolexecutiondeltaevent", "ToolExecutionDeltaEventName": ".toolexecutiondeltaevent", "ToolExecutionDeltaEventNameTypedDict": ".toolexecutiondeltaevent", @@ -2175,8 +2286,6 @@ "ToolExecutionEntry": ".toolexecutionentry", "ToolExecutionEntryName": ".toolexecutionentry", "ToolExecutionEntryNameTypedDict": ".toolexecutionentry", - "ToolExecutionEntryObject": ".toolexecutionentry", - "ToolExecutionEntryType": ".toolexecutionentry", "ToolExecutionEntryTypedDict": ".toolexecutionentry", "ToolExecutionStartedEvent": ".toolexecutionstartedevent", "ToolExecutionStartedEventName": ".toolexecutionstartedevent", @@ -2185,7 +2294,6 @@ "ToolFileChunk": ".toolfilechunk", "ToolFileChunkTool": ".toolfilechunk", "ToolFileChunkToolTypedDict": ".toolfilechunk", - "ToolFileChunkType": ".toolfilechunk", "ToolFileChunkTypedDict": ".toolfilechunk", "ToolMessage": ".toolmessage", "ToolMessageContent": ".toolmessage", @@ -2194,7 +2302,6 @@ "ToolReferenceChunk": ".toolreferencechunk", "ToolReferenceChunkTool": ".toolreferencechunk", "ToolReferenceChunkToolTypedDict": ".toolreferencechunk", - "ToolReferenceChunkType": ".toolreferencechunk", "ToolReferenceChunkTypedDict": ".toolreferencechunk", "ToolTypes": ".tooltypes", "TrainingFile": ".trainingfile", @@ -2202,7 +2309,6 @@ 
"TranscriptionResponse": ".transcriptionresponse", "TranscriptionResponseTypedDict": ".transcriptionresponse", "TranscriptionSegmentChunk": ".transcriptionsegmentchunk", - "TranscriptionSegmentChunkType": ".transcriptionsegmentchunk", "TranscriptionSegmentChunkTypedDict": ".transcriptionsegmentchunk", "TranscriptionStreamDone": ".transcriptionstreamdone", "TranscriptionStreamDoneTypedDict": ".transcriptionstreamdone", @@ -2210,6 +2316,7 @@ "TranscriptionStreamEventsData": ".transcriptionstreamevents", "TranscriptionStreamEventsDataTypedDict": ".transcriptionstreamevents", "TranscriptionStreamEventsTypedDict": ".transcriptionstreamevents", + "UnknownTranscriptionStreamEventsData": ".transcriptionstreamevents", "TranscriptionStreamEventTypes": ".transcriptionstreameventtypes", "TranscriptionStreamLanguage": ".transcriptionstreamlanguage", "TranscriptionStreamLanguageTypedDict": ".transcriptionstreamlanguage", @@ -2217,34 +2324,20 @@ "TranscriptionStreamSegmentDeltaTypedDict": ".transcriptionstreamsegmentdelta", "TranscriptionStreamTextDelta": ".transcriptionstreamtextdelta", "TranscriptionStreamTextDeltaTypedDict": ".transcriptionstreamtextdelta", - "UnarchiveFTModelOut": ".unarchiveftmodelout", - "UnarchiveFTModelOutTypedDict": ".unarchiveftmodelout", - "UnarchiveModelRequest": ".unarchivemodelop", - "UnarchiveModelRequestTypedDict": ".unarchivemodelop", - "UpdateAgentRequest": ".updateagentop", - "UpdateAgentRequestTypedDict": ".updateagentop", - "UpdateAgentVersionRequest": ".updateagentversionop", - "UpdateAgentVersionRequestTypedDict": ".updateagentversionop", - "UpdateDocumentRequest": ".updatedocumentop", - "UpdateDocumentRequestTypedDict": ".updatedocumentop", - "UpdateFTModelIn": ".updateftmodelin", - "UpdateFTModelInTypedDict": ".updateftmodelin", - "UpdateLibraryRequest": ".updatelibraryop", - "UpdateLibraryRequestTypedDict": ".updatelibraryop", - "UpdateModelRequest": ".updatemodelop", - "UpdateModelRequestTypedDict": ".updatemodelop", - 
"UpdateModelResponse": ".updatemodelop", - "UpdateModelResponseTypedDict": ".updatemodelop", - "UpdateOrCreateLibraryAccessRequest": ".updateorcreatelibraryaccessop", - "UpdateOrCreateLibraryAccessRequestTypedDict": ".updateorcreatelibraryaccessop", - "DocumentUpload": ".uploaddocumentop", - "DocumentUploadTypedDict": ".uploaddocumentop", - "UploadDocumentRequest": ".uploaddocumentop", - "UploadDocumentRequestTypedDict": ".uploaddocumentop", - "MultiPartBodyParams": ".uploadfileop", - "MultiPartBodyParamsTypedDict": ".uploadfileop", - "UploadFileOut": ".uploadfileout", - "UploadFileOutTypedDict": ".uploadfileout", + "UnarchiveModelResponse": ".unarchivemodelresponse", + "UnarchiveModelResponseTypedDict": ".unarchivemodelresponse", + "UpdateAgentRequest": ".updateagentrequest", + "UpdateAgentRequestTool": ".updateagentrequest", + "UpdateAgentRequestToolTypedDict": ".updateagentrequest", + "UpdateAgentRequestTypedDict": ".updateagentrequest", + "Attributes": ".updatedocumentrequest", + "AttributesTypedDict": ".updatedocumentrequest", + "UpdateDocumentRequest": ".updatedocumentrequest", + "UpdateDocumentRequestTypedDict": ".updatedocumentrequest", + "UpdateLibraryRequest": ".updatelibraryrequest", + "UpdateLibraryRequestTypedDict": ".updatelibraryrequest", + "UpdateModelRequest": ".updatemodelrequest", + "UpdateModelRequestTypedDict": ".updatemodelrequest", "UsageInfo": ".usageinfo", "UsageInfoTypedDict": ".usageinfo", "UserMessage": ".usermessage", @@ -2257,8 +2350,8 @@ "ValidationErrorTypedDict": ".validationerror", "WandbIntegration": ".wandbintegration", "WandbIntegrationTypedDict": ".wandbintegration", - "WandbIntegrationOut": ".wandbintegrationout", - "WandbIntegrationOutTypedDict": ".wandbintegrationout", + "WandbIntegrationResult": ".wandbintegrationresult", + "WandbIntegrationResultTypedDict": ".wandbintegrationresult", "WebSearchPremiumTool": ".websearchpremiumtool", "WebSearchPremiumToolTypedDict": ".websearchpremiumtool", "WebSearchTool": ".websearchtool", 
@@ -2266,39 +2359,11 @@ } -def dynamic_import(modname, retries=3): - for attempt in range(retries): - try: - return import_module(modname, __package__) - except KeyError: - # Clear any half-initialized module and retry - sys.modules.pop(modname, None) - if attempt == retries - 1: - break - raise KeyError(f"Failed to import module '{modname}' after {retries} attempts") - - -def __getattr__(attr_name: str) -> object: - module_name = _dynamic_imports.get(attr_name) - if module_name is None: - raise AttributeError( - f"No {attr_name} found in _dynamic_imports for module name -> {__name__} " - ) - - try: - module = dynamic_import(module_name) - result = getattr(module, attr_name) - return result - except ImportError as e: - raise ImportError( - f"Failed to import {attr_name} from {module_name}: {e}" - ) from e - except AttributeError as e: - raise AttributeError( - f"Failed to get {attr_name} from {module_name}: {e}" - ) from e +def __getattr__(attr_name: str) -> Any: + return lazy_getattr( + attr_name, package=__package__, dynamic_imports=_dynamic_imports + ) def __dir__(): - lazy_attrs = builtins.list(_dynamic_imports.keys()) - return builtins.sorted(lazy_attrs) + return lazy_dir(dynamic_imports=_dynamic_imports) diff --git a/src/mistralai/client/models/agent.py b/src/mistralai/client/models/agent.py index 05ae24cd..686a6eb8 100644 --- a/src/mistralai/client/models/agent.py +++ b/src/mistralai/client/models/agent.py @@ -10,6 +10,7 @@ from .websearchpremiumtool import WebSearchPremiumTool, WebSearchPremiumToolTypedDict from .websearchtool import WebSearchTool, WebSearchToolTypedDict from datetime import datetime +from functools import partial from mistralai.client.types import ( BaseModel, Nullable, @@ -17,7 +18,11 @@ UNSET, UNSET_SENTINEL, ) -from pydantic import Field, model_serializer +from mistralai.client.utils import validate_const +from mistralai.client.utils.unions import parse_open_union +import pydantic +from pydantic import ConfigDict, model_serializer +from 
pydantic.functional_validators import AfterValidator, BeforeValidator from typing import Any, Dict, List, Literal, Optional, Union from typing_extensions import Annotated, NotRequired, TypeAliasType, TypedDict @@ -25,16 +30,36 @@ AgentToolTypedDict = TypeAliasType( "AgentToolTypedDict", Union[ + FunctionToolTypedDict, WebSearchToolTypedDict, WebSearchPremiumToolTypedDict, CodeInterpreterToolTypedDict, ImageGenerationToolTypedDict, - FunctionToolTypedDict, DocumentLibraryToolTypedDict, ], ) +class UnknownAgentTool(BaseModel): + r"""A AgentTool variant the SDK doesn't recognize. Preserves the raw payload.""" + + type: Literal["UNKNOWN"] = "UNKNOWN" + raw: Any + is_unknown: Literal[True] = True + + model_config = ConfigDict(frozen=True) + + +_AGENT_TOOL_VARIANTS: dict[str, Any] = { + "code_interpreter": CodeInterpreterTool, + "document_library": DocumentLibraryTool, + "function": FunctionTool, + "image_generation": ImageGenerationTool, + "web_search": WebSearchTool, + "web_search_premium": WebSearchPremiumTool, +} + + AgentTool = Annotated[ Union[ CodeInterpreterTool, @@ -43,14 +68,20 @@ ImageGenerationTool, WebSearchTool, WebSearchPremiumTool, + UnknownAgentTool, ], - Field(discriminator="TYPE"), + BeforeValidator( + partial( + parse_open_union, + disc_key="type", + variants=_AGENT_TOOL_VARIANTS, + unknown_cls=UnknownAgentTool, + union_name="AgentTool", + ) + ), ] -AgentObject = Literal["agent",] - - class AgentTypedDict(TypedDict): model: str name: str @@ -70,7 +101,7 @@ class AgentTypedDict(TypedDict): description: NotRequired[Nullable[str]] handoffs: NotRequired[Nullable[List[str]]] metadata: NotRequired[Nullable[Dict[str, Any]]] - object: NotRequired[AgentObject] + object: Literal["agent"] version_message: NotRequired[Nullable[str]] @@ -108,51 +139,53 @@ class Agent(BaseModel): metadata: OptionalNullable[Dict[str, Any]] = UNSET - object: Optional[AgentObject] = "agent" + object: Annotated[ + Annotated[Optional[Literal["agent"]], 
AfterValidator(validate_const("agent"))], + pydantic.Field(alias="object"), + ] = "agent" version_message: OptionalNullable[str] = UNSET @model_serializer(mode="wrap") def serialize_model(self, handler): - optional_fields = [ - "instructions", - "tools", - "completion_args", - "description", - "handoffs", - "metadata", - "object", - "version_message", - ] - nullable_fields = [ - "instructions", - "description", - "handoffs", - "metadata", - "version_message", - ] - null_default_fields = [] - + optional_fields = set( + [ + "instructions", + "tools", + "completion_args", + "description", + "handoffs", + "metadata", + "object", + "version_message", + ] + ) + nullable_fields = set( + ["instructions", "description", "handoffs", "metadata", "version_message"] + ) serialized = handler(self) - m = {} for n, f in type(self).model_fields.items(): k = f.alias or n val = serialized.get(k) - serialized.pop(k, None) + is_nullable_and_explicitly_set = ( + k in nullable_fields + and (self.__pydantic_fields_set__.intersection({n})) # pylint: disable=no-member + ) + + if val != UNSET_SENTINEL: + if ( + val is not None + or k not in optional_fields + or is_nullable_and_explicitly_set + ): + m[k] = val - optional_nullable = k in optional_fields and k in nullable_fields - is_set = ( - self.__pydantic_fields_set__.intersection({n}) - or k in null_default_fields - ) # pylint: disable=no-member + return m - if val is not None and val != UNSET_SENTINEL: - m[k] = val - elif val != UNSET_SENTINEL and ( - not k in optional_fields or (optional_nullable and is_set) - ): - m[k] = val - return m +try: + Agent.model_rebuild() +except NameError: + pass diff --git a/src/mistralai/client/models/agentconversation.py b/src/mistralai/client/models/agentconversation.py index a850d54c..da30c663 100644 --- a/src/mistralai/client/models/agentconversation.py +++ b/src/mistralai/client/models/agentconversation.py @@ -10,12 +10,12 @@ UNSET, UNSET_SENTINEL, ) +from mistralai.client.utils import validate_const 
+import pydantic from pydantic import model_serializer +from pydantic.functional_validators import AfterValidator from typing import Any, Dict, Literal, Optional, Union -from typing_extensions import NotRequired, TypeAliasType, TypedDict - - -AgentConversationObject = Literal["conversation",] +from typing_extensions import Annotated, NotRequired, TypeAliasType, TypedDict AgentConversationAgentVersionTypedDict = TypeAliasType( @@ -39,7 +39,7 @@ class AgentConversationTypedDict(TypedDict): r"""Description of the what the conversation is about.""" metadata: NotRequired[Nullable[Dict[str, Any]]] r"""Custom metadata for the conversation.""" - object: NotRequired[AgentConversationObject] + object: Literal["conversation"] agent_version: NotRequired[Nullable[AgentConversationAgentVersionTypedDict]] @@ -61,36 +61,45 @@ class AgentConversation(BaseModel): metadata: OptionalNullable[Dict[str, Any]] = UNSET r"""Custom metadata for the conversation.""" - object: Optional[AgentConversationObject] = "conversation" + object: Annotated[ + Annotated[ + Optional[Literal["conversation"]], + AfterValidator(validate_const("conversation")), + ], + pydantic.Field(alias="object"), + ] = "conversation" agent_version: OptionalNullable[AgentConversationAgentVersion] = UNSET @model_serializer(mode="wrap") def serialize_model(self, handler): - optional_fields = ["name", "description", "metadata", "object", "agent_version"] - nullable_fields = ["name", "description", "metadata", "agent_version"] - null_default_fields = [] - + optional_fields = set( + ["name", "description", "metadata", "object", "agent_version"] + ) + nullable_fields = set(["name", "description", "metadata", "agent_version"]) serialized = handler(self) - m = {} for n, f in type(self).model_fields.items(): k = f.alias or n val = serialized.get(k) - serialized.pop(k, None) + is_nullable_and_explicitly_set = ( + k in nullable_fields + and (self.__pydantic_fields_set__.intersection({n})) # pylint: disable=no-member + ) + + if val != 
UNSET_SENTINEL: + if ( + val is not None + or k not in optional_fields + or is_nullable_and_explicitly_set + ): + m[k] = val - optional_nullable = k in optional_fields and k in nullable_fields - is_set = ( - self.__pydantic_fields_set__.intersection({n}) - or k in null_default_fields - ) # pylint: disable=no-member + return m - if val is not None and val != UNSET_SENTINEL: - m[k] = val - elif val != UNSET_SENTINEL and ( - not k in optional_fields or (optional_nullable and is_set) - ): - m[k] = val - return m +try: + AgentConversation.model_rebuild() +except NameError: + pass diff --git a/src/mistralai/client/models/agenthandoffdoneevent.py b/src/mistralai/client/models/agenthandoffdoneevent.py index 40bf8497..e2609e3d 100644 --- a/src/mistralai/client/models/agenthandoffdoneevent.py +++ b/src/mistralai/client/models/agenthandoffdoneevent.py @@ -3,9 +3,10 @@ from __future__ import annotations from datetime import datetime -from mistralai.client.types import BaseModel +from mistralai.client.types import BaseModel, UNSET_SENTINEL from mistralai.client.utils import validate_const import pydantic +from pydantic import model_serializer from pydantic.functional_validators import AfterValidator from typing import Literal, Optional from typing_extensions import Annotated, NotRequired, TypedDict @@ -27,7 +28,7 @@ class AgentHandoffDoneEvent(BaseModel): next_agent_name: str - TYPE: Annotated[ + type: Annotated[ Annotated[ Literal["agent.handoff.done"], AfterValidator(validate_const("agent.handoff.done")), @@ -38,3 +39,25 @@ class AgentHandoffDoneEvent(BaseModel): created_at: Optional[datetime] = None output_index: Optional[int] = 0 + + @model_serializer(mode="wrap") + def serialize_model(self, handler): + optional_fields = set(["created_at", "output_index"]) + serialized = handler(self) + m = {} + + for n, f in type(self).model_fields.items(): + k = f.alias or n + val = serialized.get(k) + + if val != UNSET_SENTINEL: + if val is not None or k not in optional_fields: + m[k] = 
val + + return m + + +try: + AgentHandoffDoneEvent.model_rebuild() +except NameError: + pass diff --git a/src/mistralai/client/models/agenthandoffentry.py b/src/mistralai/client/models/agenthandoffentry.py index b18fe17c..f92ef2cc 100644 --- a/src/mistralai/client/models/agenthandoffentry.py +++ b/src/mistralai/client/models/agenthandoffentry.py @@ -10,15 +10,12 @@ UNSET, UNSET_SENTINEL, ) +from mistralai.client.utils import validate_const +import pydantic from pydantic import model_serializer +from pydantic.functional_validators import AfterValidator from typing import Literal, Optional -from typing_extensions import NotRequired, TypedDict - - -AgentHandoffEntryObject = Literal["entry",] - - -AgentHandoffEntryType = Literal["agent.handoff",] +from typing_extensions import Annotated, NotRequired, TypedDict class AgentHandoffEntryTypedDict(TypedDict): @@ -26,8 +23,8 @@ class AgentHandoffEntryTypedDict(TypedDict): previous_agent_name: str next_agent_id: str next_agent_name: str - object: NotRequired[AgentHandoffEntryObject] - type: NotRequired[AgentHandoffEntryType] + object: Literal["entry"] + type: Literal["agent.handoff"] created_at: NotRequired[datetime] completed_at: NotRequired[Nullable[datetime]] id: NotRequired[str] @@ -42,9 +39,18 @@ class AgentHandoffEntry(BaseModel): next_agent_name: str - object: Optional[AgentHandoffEntryObject] = "entry" + object: Annotated[ + Annotated[Optional[Literal["entry"]], AfterValidator(validate_const("entry"))], + pydantic.Field(alias="object"), + ] = "entry" - type: Optional[AgentHandoffEntryType] = "agent.handoff" + type: Annotated[ + Annotated[ + Optional[Literal["agent.handoff"]], + AfterValidator(validate_const("agent.handoff")), + ], + pydantic.Field(alias="type"), + ] = "agent.handoff" created_at: Optional[datetime] = None @@ -54,30 +60,31 @@ class AgentHandoffEntry(BaseModel): @model_serializer(mode="wrap") def serialize_model(self, handler): - optional_fields = ["object", "type", "created_at", "completed_at", "id"] - 
nullable_fields = ["completed_at"] - null_default_fields = [] - + optional_fields = set(["object", "type", "created_at", "completed_at", "id"]) + nullable_fields = set(["completed_at"]) serialized = handler(self) - m = {} for n, f in type(self).model_fields.items(): k = f.alias or n val = serialized.get(k) - serialized.pop(k, None) + is_nullable_and_explicitly_set = ( + k in nullable_fields + and (self.__pydantic_fields_set__.intersection({n})) # pylint: disable=no-member + ) + + if val != UNSET_SENTINEL: + if ( + val is not None + or k not in optional_fields + or is_nullable_and_explicitly_set + ): + m[k] = val - optional_nullable = k in optional_fields and k in nullable_fields - is_set = ( - self.__pydantic_fields_set__.intersection({n}) - or k in null_default_fields - ) # pylint: disable=no-member + return m - if val is not None and val != UNSET_SENTINEL: - m[k] = val - elif val != UNSET_SENTINEL and ( - not k in optional_fields or (optional_nullable and is_set) - ): - m[k] = val - return m +try: + AgentHandoffEntry.model_rebuild() +except NameError: + pass diff --git a/src/mistralai/client/models/agenthandoffstartedevent.py b/src/mistralai/client/models/agenthandoffstartedevent.py index e278aef3..2a402341 100644 --- a/src/mistralai/client/models/agenthandoffstartedevent.py +++ b/src/mistralai/client/models/agenthandoffstartedevent.py @@ -3,9 +3,10 @@ from __future__ import annotations from datetime import datetime -from mistralai.client.types import BaseModel +from mistralai.client.types import BaseModel, UNSET_SENTINEL from mistralai.client.utils import validate_const import pydantic +from pydantic import model_serializer from pydantic.functional_validators import AfterValidator from typing import Literal, Optional from typing_extensions import Annotated, NotRequired, TypedDict @@ -27,7 +28,7 @@ class AgentHandoffStartedEvent(BaseModel): previous_agent_name: str - TYPE: Annotated[ + type: Annotated[ Annotated[ Literal["agent.handoff.started"], 
AfterValidator(validate_const("agent.handoff.started")), @@ -38,3 +39,25 @@ class AgentHandoffStartedEvent(BaseModel): created_at: Optional[datetime] = None output_index: Optional[int] = 0 + + @model_serializer(mode="wrap") + def serialize_model(self, handler): + optional_fields = set(["created_at", "output_index"]) + serialized = handler(self) + m = {} + + for n, f in type(self).model_fields.items(): + k = f.alias or n + val = serialized.get(k) + + if val != UNSET_SENTINEL: + if val is not None or k not in optional_fields: + m[k] = val + + return m + + +try: + AgentHandoffStartedEvent.model_rebuild() +except NameError: + pass diff --git a/src/mistralai/client/models/createorupdateagentaliasop.py b/src/mistralai/client/models/agents_api_v1_agents_create_or_update_aliasop.py similarity index 80% rename from src/mistralai/client/models/createorupdateagentaliasop.py rename to src/mistralai/client/models/agents_api_v1_agents_create_or_update_aliasop.py index cde1dd05..04761ae7 100644 --- a/src/mistralai/client/models/createorupdateagentaliasop.py +++ b/src/mistralai/client/models/agents_api_v1_agents_create_or_update_aliasop.py @@ -1,5 +1,5 @@ """Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" -# @generated-id: a79cf28bda01 +# @generated-id: 23a832f8f175 from __future__ import annotations from mistralai.client.types import BaseModel @@ -7,13 +7,13 @@ from typing_extensions import Annotated, TypedDict -class CreateOrUpdateAgentAliasRequestTypedDict(TypedDict): +class AgentsAPIV1AgentsCreateOrUpdateAliasRequestTypedDict(TypedDict): agent_id: str alias: str version: int -class CreateOrUpdateAgentAliasRequest(BaseModel): +class AgentsAPIV1AgentsCreateOrUpdateAliasRequest(BaseModel): agent_id: Annotated[ str, FieldMetadata(path=PathParamMetadata(style="simple", explode=False)) ] diff --git a/src/mistralai/client/models/deleteagentaliasop.py b/src/mistralai/client/models/agents_api_v1_agents_delete_aliasop.py similarity index 78% rename from src/mistralai/client/models/deleteagentaliasop.py rename to src/mistralai/client/models/agents_api_v1_agents_delete_aliasop.py index c52d099e..291a9802 100644 --- a/src/mistralai/client/models/deleteagentaliasop.py +++ b/src/mistralai/client/models/agents_api_v1_agents_delete_aliasop.py @@ -1,5 +1,5 @@ """Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" -# @generated-id: e4d0d7f75b24 +# @generated-id: 9c9947e768d3 from __future__ import annotations from mistralai.client.types import BaseModel @@ -7,12 +7,12 @@ from typing_extensions import Annotated, TypedDict -class DeleteAgentAliasRequestTypedDict(TypedDict): +class AgentsAPIV1AgentsDeleteAliasRequestTypedDict(TypedDict): agent_id: str alias: str -class DeleteAgentAliasRequest(BaseModel): +class AgentsAPIV1AgentsDeleteAliasRequest(BaseModel): agent_id: Annotated[ str, FieldMetadata(path=PathParamMetadata(style="simple", explode=False)) ] diff --git a/src/mistralai/client/models/listagentaliasesop.py b/src/mistralai/client/models/agents_api_v1_agents_deleteop.py similarity index 74% rename from src/mistralai/client/models/listagentaliasesop.py rename to src/mistralai/client/models/agents_api_v1_agents_deleteop.py index 83c6d176..5e41fdcd 100644 --- a/src/mistralai/client/models/listagentaliasesop.py +++ b/src/mistralai/client/models/agents_api_v1_agents_deleteop.py @@ -1,5 +1,5 @@ """Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" -# @generated-id: ff038766a902 +# @generated-id: 95adb6768908 from __future__ import annotations from mistralai.client.types import BaseModel @@ -7,11 +7,11 @@ from typing_extensions import Annotated, TypedDict -class ListAgentAliasesRequestTypedDict(TypedDict): +class AgentsAPIV1AgentsDeleteRequestTypedDict(TypedDict): agent_id: str -class ListAgentAliasesRequest(BaseModel): +class AgentsAPIV1AgentsDeleteRequest(BaseModel): agent_id: Annotated[ str, FieldMetadata(path=PathParamMetadata(style="simple", explode=False)) ] diff --git a/src/mistralai/client/models/getagentversionop.py b/src/mistralai/client/models/agents_api_v1_agents_get_versionop.py similarity index 78% rename from src/mistralai/client/models/getagentversionop.py rename to src/mistralai/client/models/agents_api_v1_agents_get_versionop.py index 77b8a266..941863d0 100644 --- a/src/mistralai/client/models/getagentversionop.py +++ b/src/mistralai/client/models/agents_api_v1_agents_get_versionop.py @@ -1,5 +1,5 @@ """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" -# @generated-id: a0db5a6aab1f +# @generated-id: ef9914284afb from __future__ import annotations from mistralai.client.types import BaseModel @@ -7,12 +7,12 @@ from typing_extensions import Annotated, TypedDict -class GetAgentVersionRequestTypedDict(TypedDict): +class AgentsAPIV1AgentsGetVersionRequestTypedDict(TypedDict): agent_id: str version: str -class GetAgentVersionRequest(BaseModel): +class AgentsAPIV1AgentsGetVersionRequest(BaseModel): agent_id: Annotated[ str, FieldMetadata(path=PathParamMetadata(style="simple", explode=False)) ] diff --git a/src/mistralai/client/models/agents_api_v1_agents_getop.py b/src/mistralai/client/models/agents_api_v1_agents_getop.py new file mode 100644 index 00000000..dd17580d --- /dev/null +++ b/src/mistralai/client/models/agents_api_v1_agents_getop.py @@ -0,0 +1,66 @@ +"""Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" +# @generated-id: f5918c34f1c7 + +from __future__ import annotations +from mistralai.client.types import ( + BaseModel, + Nullable, + OptionalNullable, + UNSET, + UNSET_SENTINEL, +) +from mistralai.client.utils import FieldMetadata, PathParamMetadata, QueryParamMetadata +from pydantic import model_serializer +from typing import Union +from typing_extensions import Annotated, NotRequired, TypeAliasType, TypedDict + + +AgentsAPIV1AgentsGetAgentVersionTypedDict = TypeAliasType( + "AgentsAPIV1AgentsGetAgentVersionTypedDict", Union[int, str] +) + + +AgentsAPIV1AgentsGetAgentVersion = TypeAliasType( + "AgentsAPIV1AgentsGetAgentVersion", Union[int, str] +) + + +class AgentsAPIV1AgentsGetRequestTypedDict(TypedDict): + agent_id: str + agent_version: NotRequired[Nullable[AgentsAPIV1AgentsGetAgentVersionTypedDict]] + + +class AgentsAPIV1AgentsGetRequest(BaseModel): + agent_id: Annotated[ + str, FieldMetadata(path=PathParamMetadata(style="simple", explode=False)) + ] + + agent_version: Annotated[ + OptionalNullable[AgentsAPIV1AgentsGetAgentVersion], + FieldMetadata(query=QueryParamMetadata(style="form", explode=True)), + ] = UNSET + + @model_serializer(mode="wrap") + def serialize_model(self, handler): + optional_fields = set(["agent_version"]) + nullable_fields = set(["agent_version"]) + serialized = handler(self) + m = {} + + for n, f in type(self).model_fields.items(): + k = f.alias or n + val = serialized.get(k) + is_nullable_and_explicitly_set = ( + k in nullable_fields + and (self.__pydantic_fields_set__.intersection({n})) # pylint: disable=no-member + ) + + if val != UNSET_SENTINEL: + if ( + val is not None + or k not in optional_fields + or is_nullable_and_explicitly_set + ): + m[k] = val + + return m diff --git a/src/mistralai/client/models/deleteagentop.py b/src/mistralai/client/models/agents_api_v1_agents_list_version_aliasesop.py similarity index 71% rename from src/mistralai/client/models/deleteagentop.py rename to 
src/mistralai/client/models/agents_api_v1_agents_list_version_aliasesop.py index 8b14bca7..bb1da602 100644 --- a/src/mistralai/client/models/deleteagentop.py +++ b/src/mistralai/client/models/agents_api_v1_agents_list_version_aliasesop.py @@ -1,5 +1,5 @@ """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" -# @generated-id: 089fb7f87aea +# @generated-id: a04815e6c798 from __future__ import annotations from mistralai.client.types import BaseModel @@ -7,11 +7,11 @@ from typing_extensions import Annotated, TypedDict -class DeleteAgentRequestTypedDict(TypedDict): +class AgentsAPIV1AgentsListVersionAliasesRequestTypedDict(TypedDict): agent_id: str -class DeleteAgentRequest(BaseModel): +class AgentsAPIV1AgentsListVersionAliasesRequest(BaseModel): agent_id: Annotated[ str, FieldMetadata(path=PathParamMetadata(style="simple", explode=False)) ] diff --git a/src/mistralai/client/models/listagentversionsop.py b/src/mistralai/client/models/agents_api_v1_agents_list_versionsop.py similarity index 56% rename from src/mistralai/client/models/listagentversionsop.py rename to src/mistralai/client/models/agents_api_v1_agents_list_versionsop.py index 613d3d85..54b62e90 100644 --- a/src/mistralai/client/models/listagentversionsop.py +++ b/src/mistralai/client/models/agents_api_v1_agents_list_versionsop.py @@ -1,14 +1,15 @@ """Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" -# @generated-id: ccc5fb48e78f +# @generated-id: 19e3310c3907 from __future__ import annotations -from mistralai.client.types import BaseModel +from mistralai.client.types import BaseModel, UNSET_SENTINEL from mistralai.client.utils import FieldMetadata, PathParamMetadata, QueryParamMetadata +from pydantic import model_serializer from typing import Optional from typing_extensions import Annotated, NotRequired, TypedDict -class ListAgentVersionsRequestTypedDict(TypedDict): +class AgentsAPIV1AgentsListVersionsRequestTypedDict(TypedDict): agent_id: str page: NotRequired[int] r"""Page number (0-indexed)""" @@ -16,7 +17,7 @@ class ListAgentVersionsRequestTypedDict(TypedDict): r"""Number of versions per page""" -class ListAgentVersionsRequest(BaseModel): +class AgentsAPIV1AgentsListVersionsRequest(BaseModel): agent_id: Annotated[ str, FieldMetadata(path=PathParamMetadata(style="simple", explode=False)) ] @@ -32,3 +33,19 @@ class ListAgentVersionsRequest(BaseModel): FieldMetadata(query=QueryParamMetadata(style="form", explode=True)), ] = 20 r"""Number of versions per page""" + + @model_serializer(mode="wrap") + def serialize_model(self, handler): + optional_fields = set(["page", "page_size"]) + serialized = handler(self) + m = {} + + for n, f in type(self).model_fields.items(): + k = f.alias or n + val = serialized.get(k) + + if val != UNSET_SENTINEL: + if val is not None or k not in optional_fields: + m[k] = val + + return m diff --git a/src/mistralai/client/models/listagentsop.py b/src/mistralai/client/models/agents_api_v1_agents_listop.py similarity index 70% rename from src/mistralai/client/models/listagentsop.py rename to src/mistralai/client/models/agents_api_v1_agents_listop.py index 863fc13a..97b1c7f1 100644 --- a/src/mistralai/client/models/listagentsop.py +++ b/src/mistralai/client/models/agents_api_v1_agents_listop.py @@ -1,5 +1,5 @@ """Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" -# @generated-id: a573a873c404 +# @generated-id: 25a6460a6e19 from __future__ import annotations from .requestsource import RequestSource @@ -16,7 +16,7 @@ from typing_extensions import Annotated, NotRequired, TypedDict -class ListAgentsRequestTypedDict(TypedDict): +class AgentsAPIV1AgentsListRequestTypedDict(TypedDict): page: NotRequired[int] r"""Page number (0-indexed)""" page_size: NotRequired[int] @@ -31,7 +31,7 @@ class ListAgentsRequestTypedDict(TypedDict): metadata: NotRequired[Nullable[Dict[str, Any]]] -class ListAgentsRequest(BaseModel): +class AgentsAPIV1AgentsListRequest(BaseModel): page: Annotated[ Optional[int], FieldMetadata(query=QueryParamMetadata(style="form", explode=True)), @@ -78,46 +78,38 @@ class ListAgentsRequest(BaseModel): @model_serializer(mode="wrap") def serialize_model(self, handler): - optional_fields = [ - "page", - "page_size", - "deployment_chat", - "sources", - "name", - "search", - "id", - "metadata", - ] - nullable_fields = [ - "deployment_chat", - "sources", - "name", - "search", - "id", - "metadata", - ] - null_default_fields = [] - + optional_fields = set( + [ + "page", + "page_size", + "deployment_chat", + "sources", + "name", + "search", + "id", + "metadata", + ] + ) + nullable_fields = set( + ["deployment_chat", "sources", "name", "search", "id", "metadata"] + ) serialized = handler(self) - m = {} for n, f in type(self).model_fields.items(): k = f.alias or n val = serialized.get(k) - serialized.pop(k, None) - - optional_nullable = k in optional_fields and k in nullable_fields - is_set = ( - self.__pydantic_fields_set__.intersection({n}) - or k in null_default_fields - ) # pylint: disable=no-member - - if val is not None and val != UNSET_SENTINEL: - m[k] = val - elif val != UNSET_SENTINEL and ( - not k in optional_fields or (optional_nullable and is_set) - ): - m[k] = val + is_nullable_and_explicitly_set = ( + k in nullable_fields + and (self.__pydantic_fields_set__.intersection({n})) # pylint: 
disable=no-member + ) + + if val != UNSET_SENTINEL: + if ( + val is not None + or k not in optional_fields + or is_nullable_and_explicitly_set + ): + m[k] = val return m diff --git a/src/mistralai/client/models/updateagentversionop.py b/src/mistralai/client/models/agents_api_v1_agents_update_versionop.py similarity index 78% rename from src/mistralai/client/models/updateagentversionop.py rename to src/mistralai/client/models/agents_api_v1_agents_update_versionop.py index 114013bc..5ab821ea 100644 --- a/src/mistralai/client/models/updateagentversionop.py +++ b/src/mistralai/client/models/agents_api_v1_agents_update_versionop.py @@ -1,5 +1,5 @@ """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" -# @generated-id: 3821dca5b20a +# @generated-id: 63f61b8891bf from __future__ import annotations from mistralai.client.types import BaseModel @@ -7,12 +7,12 @@ from typing_extensions import Annotated, TypedDict -class UpdateAgentVersionRequestTypedDict(TypedDict): +class AgentsAPIV1AgentsUpdateVersionRequestTypedDict(TypedDict): agent_id: str version: int -class UpdateAgentVersionRequest(BaseModel): +class AgentsAPIV1AgentsUpdateVersionRequest(BaseModel): agent_id: Annotated[ str, FieldMetadata(path=PathParamMetadata(style="simple", explode=False)) ] diff --git a/src/mistralai/client/models/updateagentop.py b/src/mistralai/client/models/agents_api_v1_agents_updateop.py similarity index 62% rename from src/mistralai/client/models/updateagentop.py rename to src/mistralai/client/models/agents_api_v1_agents_updateop.py index 28acc83d..69da5001 100644 --- a/src/mistralai/client/models/updateagentop.py +++ b/src/mistralai/client/models/agents_api_v1_agents_updateop.py @@ -1,24 +1,24 @@ """Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" -# @generated-id: ae3a6abea468 +# @generated-id: bb55993c932d from __future__ import annotations -from .agentupdaterequest import AgentUpdateRequest, AgentUpdateRequestTypedDict +from .updateagentrequest import UpdateAgentRequest, UpdateAgentRequestTypedDict from mistralai.client.types import BaseModel from mistralai.client.utils import FieldMetadata, PathParamMetadata, RequestMetadata from typing_extensions import Annotated, TypedDict -class UpdateAgentRequestTypedDict(TypedDict): +class AgentsAPIV1AgentsUpdateRequestTypedDict(TypedDict): agent_id: str - agent_update_request: AgentUpdateRequestTypedDict + update_agent_request: UpdateAgentRequestTypedDict -class UpdateAgentRequest(BaseModel): +class AgentsAPIV1AgentsUpdateRequest(BaseModel): agent_id: Annotated[ str, FieldMetadata(path=PathParamMetadata(style="simple", explode=False)) ] - agent_update_request: Annotated[ - AgentUpdateRequest, + update_agent_request: Annotated[ + UpdateAgentRequest, FieldMetadata(request=RequestMetadata(media_type="application/json")), ] diff --git a/src/mistralai/client/models/appendconversationstreamop.py b/src/mistralai/client/models/agents_api_v1_conversations_append_streamop.py similarity index 85% rename from src/mistralai/client/models/appendconversationstreamop.py rename to src/mistralai/client/models/agents_api_v1_conversations_append_streamop.py index 55efca0e..d257dc78 100644 --- a/src/mistralai/client/models/appendconversationstreamop.py +++ b/src/mistralai/client/models/agents_api_v1_conversations_append_streamop.py @@ -1,5 +1,5 @@ """Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" -# @generated-id: 1ab08b189e9d +# @generated-id: ec00e0905f15 from __future__ import annotations from .conversationappendstreamrequest import ( @@ -11,13 +11,13 @@ from typing_extensions import Annotated, TypedDict -class AppendConversationStreamRequestTypedDict(TypedDict): +class AgentsAPIV1ConversationsAppendStreamRequestTypedDict(TypedDict): conversation_id: str r"""ID of the conversation to which we append entries.""" conversation_append_stream_request: ConversationAppendStreamRequestTypedDict -class AppendConversationStreamRequest(BaseModel): +class AgentsAPIV1ConversationsAppendStreamRequest(BaseModel): conversation_id: Annotated[ str, FieldMetadata(path=PathParamMetadata(style="simple", explode=False)) ] diff --git a/src/mistralai/client/models/appendconversationop.py b/src/mistralai/client/models/agents_api_v1_conversations_appendop.py similarity index 85% rename from src/mistralai/client/models/appendconversationop.py rename to src/mistralai/client/models/agents_api_v1_conversations_appendop.py index 710b8e1c..61fec083 100644 --- a/src/mistralai/client/models/appendconversationop.py +++ b/src/mistralai/client/models/agents_api_v1_conversations_appendop.py @@ -1,5 +1,5 @@ """Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" -# @generated-id: 1c47dd1e7c7e +# @generated-id: 39c6125e850c from __future__ import annotations from .conversationappendrequest import ( @@ -11,13 +11,13 @@ from typing_extensions import Annotated, TypedDict -class AppendConversationRequestTypedDict(TypedDict): +class AgentsAPIV1ConversationsAppendRequestTypedDict(TypedDict): conversation_id: str r"""ID of the conversation to which we append entries.""" conversation_append_request: ConversationAppendRequestTypedDict -class AppendConversationRequest(BaseModel): +class AgentsAPIV1ConversationsAppendRequest(BaseModel): conversation_id: Annotated[ str, FieldMetadata(path=PathParamMetadata(style="simple", explode=False)) ] diff --git a/src/mistralai/client/models/deleteconversationop.py b/src/mistralai/client/models/agents_api_v1_conversations_deleteop.py similarity index 78% rename from src/mistralai/client/models/deleteconversationop.py rename to src/mistralai/client/models/agents_api_v1_conversations_deleteop.py index 39607f40..499645a7 100644 --- a/src/mistralai/client/models/deleteconversationop.py +++ b/src/mistralai/client/models/agents_api_v1_conversations_deleteop.py @@ -1,5 +1,5 @@ """Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" -# @generated-id: 86fefc353db0 +# @generated-id: 0792e6abbdcb from __future__ import annotations from mistralai.client.types import BaseModel @@ -7,12 +7,12 @@ from typing_extensions import Annotated, TypedDict -class DeleteConversationRequestTypedDict(TypedDict): +class AgentsAPIV1ConversationsDeleteRequestTypedDict(TypedDict): conversation_id: str r"""ID of the conversation from which we are fetching metadata.""" -class DeleteConversationRequest(BaseModel): +class AgentsAPIV1ConversationsDeleteRequest(BaseModel): conversation_id: Annotated[ str, FieldMetadata(path=PathParamMetadata(style="simple", explode=False)) ] diff --git a/src/mistralai/client/models/getconversationop.py b/src/mistralai/client/models/agents_api_v1_conversations_getop.py similarity index 88% rename from src/mistralai/client/models/getconversationop.py rename to src/mistralai/client/models/agents_api_v1_conversations_getop.py index d204d175..504616ab 100644 --- a/src/mistralai/client/models/getconversationop.py +++ b/src/mistralai/client/models/agents_api_v1_conversations_getop.py @@ -1,5 +1,5 @@ """Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" -# @generated-id: 1a622b8337ac +# @generated-id: c530f2fc64d0 from __future__ import annotations from .agentconversation import AgentConversation, AgentConversationTypedDict @@ -10,12 +10,12 @@ from typing_extensions import Annotated, TypeAliasType, TypedDict -class GetConversationRequestTypedDict(TypedDict): +class AgentsAPIV1ConversationsGetRequestTypedDict(TypedDict): conversation_id: str r"""ID of the conversation from which we are fetching metadata.""" -class GetConversationRequest(BaseModel): +class AgentsAPIV1ConversationsGetRequest(BaseModel): conversation_id: Annotated[ str, FieldMetadata(path=PathParamMetadata(style="simple", explode=False)) ] diff --git a/src/mistralai/client/models/getconversationhistoryop.py b/src/mistralai/client/models/agents_api_v1_conversations_historyop.py similarity index 78% rename from src/mistralai/client/models/getconversationhistoryop.py rename to src/mistralai/client/models/agents_api_v1_conversations_historyop.py index c1fbf3de..ef0a4eb0 100644 --- a/src/mistralai/client/models/getconversationhistoryop.py +++ b/src/mistralai/client/models/agents_api_v1_conversations_historyop.py @@ -1,5 +1,5 @@ """Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" -# @generated-id: c863a4cbeb34 +# @generated-id: 2f5ca33768aa from __future__ import annotations from mistralai.client.types import BaseModel @@ -7,12 +7,12 @@ from typing_extensions import Annotated, TypedDict -class GetConversationHistoryRequestTypedDict(TypedDict): +class AgentsAPIV1ConversationsHistoryRequestTypedDict(TypedDict): conversation_id: str r"""ID of the conversation from which we are fetching entries.""" -class GetConversationHistoryRequest(BaseModel): +class AgentsAPIV1ConversationsHistoryRequest(BaseModel): conversation_id: Annotated[ str, FieldMetadata(path=PathParamMetadata(style="simple", explode=False)) ] diff --git a/src/mistralai/client/models/listconversationsop.py b/src/mistralai/client/models/agents_api_v1_conversations_listop.py similarity index 59% rename from src/mistralai/client/models/listconversationsop.py rename to src/mistralai/client/models/agents_api_v1_conversations_listop.py index 1c9a347c..8bf66aea 100644 --- a/src/mistralai/client/models/listconversationsop.py +++ b/src/mistralai/client/models/agents_api_v1_conversations_listop.py @@ -1,5 +1,5 @@ """Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" -# @generated-id: d6007f6c1643 +# @generated-id: 936e36181d36 from __future__ import annotations from .agentconversation import AgentConversation, AgentConversationTypedDict @@ -17,13 +17,13 @@ from typing_extensions import Annotated, NotRequired, TypeAliasType, TypedDict -class ListConversationsRequestTypedDict(TypedDict): +class AgentsAPIV1ConversationsListRequestTypedDict(TypedDict): page: NotRequired[int] page_size: NotRequired[int] metadata: NotRequired[Nullable[Dict[str, Any]]] -class ListConversationsRequest(BaseModel): +class AgentsAPIV1ConversationsListRequest(BaseModel): page: Annotated[ Optional[int], FieldMetadata(query=QueryParamMetadata(style="form", explode=True)), @@ -41,41 +41,36 @@ class ListConversationsRequest(BaseModel): @model_serializer(mode="wrap") def serialize_model(self, handler): - optional_fields = ["page", "page_size", "metadata"] - nullable_fields = ["metadata"] - null_default_fields = [] - + optional_fields = set(["page", "page_size", "metadata"]) + nullable_fields = set(["metadata"]) serialized = handler(self) - m = {} for n, f in type(self).model_fields.items(): k = f.alias or n val = serialized.get(k) - serialized.pop(k, None) - - optional_nullable = k in optional_fields and k in nullable_fields - is_set = ( - self.__pydantic_fields_set__.intersection({n}) - or k in null_default_fields - ) # pylint: disable=no-member - - if val is not None and val != UNSET_SENTINEL: - m[k] = val - elif val != UNSET_SENTINEL and ( - not k in optional_fields or (optional_nullable and is_set) - ): - m[k] = val + is_nullable_and_explicitly_set = ( + k in nullable_fields + and (self.__pydantic_fields_set__.intersection({n})) # pylint: disable=no-member + ) + + if val != UNSET_SENTINEL: + if ( + val is not None + or k not in optional_fields + or is_nullable_and_explicitly_set + ): + m[k] = val return m -ListConversationsResponseTypedDict = TypeAliasType( - "ListConversationsResponseTypedDict", 
+AgentsAPIV1ConversationsListResponseTypedDict = TypeAliasType( + "AgentsAPIV1ConversationsListResponseTypedDict", Union[AgentConversationTypedDict, ModelConversationTypedDict], ) -ListConversationsResponse = TypeAliasType( - "ListConversationsResponse", Union[AgentConversation, ModelConversation] +AgentsAPIV1ConversationsListResponse = TypeAliasType( + "AgentsAPIV1ConversationsListResponse", Union[AgentConversation, ModelConversation] ) diff --git a/src/mistralai/client/models/getconversationmessagesop.py b/src/mistralai/client/models/agents_api_v1_conversations_messagesop.py similarity index 78% rename from src/mistralai/client/models/getconversationmessagesop.py rename to src/mistralai/client/models/agents_api_v1_conversations_messagesop.py index 6666198e..19978a19 100644 --- a/src/mistralai/client/models/getconversationmessagesop.py +++ b/src/mistralai/client/models/agents_api_v1_conversations_messagesop.py @@ -1,5 +1,5 @@ """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" -# @generated-id: bb8a90ba7c22 +# @generated-id: b5141764a708 from __future__ import annotations from mistralai.client.types import BaseModel @@ -7,12 +7,12 @@ from typing_extensions import Annotated, TypedDict -class GetConversationMessagesRequestTypedDict(TypedDict): +class AgentsAPIV1ConversationsMessagesRequestTypedDict(TypedDict): conversation_id: str r"""ID of the conversation from which we are fetching messages.""" -class GetConversationMessagesRequest(BaseModel): +class AgentsAPIV1ConversationsMessagesRequest(BaseModel): conversation_id: Annotated[ str, FieldMetadata(path=PathParamMetadata(style="simple", explode=False)) ] diff --git a/src/mistralai/client/models/restartconversationstreamop.py b/src/mistralai/client/models/agents_api_v1_conversations_restart_streamop.py similarity index 85% rename from src/mistralai/client/models/restartconversationstreamop.py rename to src/mistralai/client/models/agents_api_v1_conversations_restart_streamop.py index 
3b2025f5..63c74449 100644 --- a/src/mistralai/client/models/restartconversationstreamop.py +++ b/src/mistralai/client/models/agents_api_v1_conversations_restart_streamop.py @@ -1,5 +1,5 @@ """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" -# @generated-id: 16dc9ee5bf22 +# @generated-id: c284a1711148 from __future__ import annotations from .conversationrestartstreamrequest import ( @@ -11,13 +11,13 @@ from typing_extensions import Annotated, TypedDict -class RestartConversationStreamRequestTypedDict(TypedDict): +class AgentsAPIV1ConversationsRestartStreamRequestTypedDict(TypedDict): conversation_id: str r"""ID of the original conversation which is being restarted.""" conversation_restart_stream_request: ConversationRestartStreamRequestTypedDict -class RestartConversationStreamRequest(BaseModel): +class AgentsAPIV1ConversationsRestartStreamRequest(BaseModel): conversation_id: Annotated[ str, FieldMetadata(path=PathParamMetadata(style="simple", explode=False)) ] diff --git a/src/mistralai/client/models/restartconversationop.py b/src/mistralai/client/models/agents_api_v1_conversations_restartop.py similarity index 85% rename from src/mistralai/client/models/restartconversationop.py rename to src/mistralai/client/models/agents_api_v1_conversations_restartop.py index b09eaed5..3186d5df 100644 --- a/src/mistralai/client/models/restartconversationop.py +++ b/src/mistralai/client/models/agents_api_v1_conversations_restartop.py @@ -1,5 +1,5 @@ """Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" -# @generated-id: 2f6f3e4bbfd8 +# @generated-id: 3ba234e5a8fc from __future__ import annotations from .conversationrestartrequest import ( @@ -11,13 +11,13 @@ from typing_extensions import Annotated, TypedDict -class RestartConversationRequestTypedDict(TypedDict): +class AgentsAPIV1ConversationsRestartRequestTypedDict(TypedDict): conversation_id: str r"""ID of the original conversation which is being restarted.""" conversation_restart_request: ConversationRestartRequestTypedDict -class RestartConversationRequest(BaseModel): +class AgentsAPIV1ConversationsRestartRequest(BaseModel): conversation_id: Annotated[ str, FieldMetadata(path=PathParamMetadata(style="simple", explode=False)) ] diff --git a/src/mistralai/client/models/agentscompletionrequest.py b/src/mistralai/client/models/agentscompletionrequest.py index f4a2d646..6955f6ac 100644 --- a/src/mistralai/client/models/agentscompletionrequest.py +++ b/src/mistralai/client/models/agentscompletionrequest.py @@ -148,52 +148,44 @@ class AgentsCompletionRequest(BaseModel): @model_serializer(mode="wrap") def serialize_model(self, handler): - optional_fields = [ - "max_tokens", - "stream", - "stop", - "random_seed", - "metadata", - "response_format", - "tools", - "tool_choice", - "presence_penalty", - "frequency_penalty", - "n", - "prediction", - "parallel_tool_calls", - "prompt_mode", - ] - nullable_fields = [ - "max_tokens", - "random_seed", - "metadata", - "tools", - "n", - "prompt_mode", - ] - null_default_fields = [] - + optional_fields = set( + [ + "max_tokens", + "stream", + "stop", + "random_seed", + "metadata", + "response_format", + "tools", + "tool_choice", + "presence_penalty", + "frequency_penalty", + "n", + "prediction", + "parallel_tool_calls", + "prompt_mode", + ] + ) + nullable_fields = set( + ["max_tokens", "random_seed", "metadata", "tools", "n", "prompt_mode"] + ) serialized = handler(self) - m = {} for n, f in type(self).model_fields.items(): k = f.alias or n val = serialized.get(k) - 
serialized.pop(k, None) - - optional_nullable = k in optional_fields and k in nullable_fields - is_set = ( - self.__pydantic_fields_set__.intersection({n}) - or k in null_default_fields - ) # pylint: disable=no-member - - if val is not None and val != UNSET_SENTINEL: - m[k] = val - elif val != UNSET_SENTINEL and ( - not k in optional_fields or (optional_nullable and is_set) - ): - m[k] = val + is_nullable_and_explicitly_set = ( + k in nullable_fields + and (self.__pydantic_fields_set__.intersection({n})) # pylint: disable=no-member + ) + + if val != UNSET_SENTINEL: + if ( + val is not None + or k not in optional_fields + or is_nullable_and_explicitly_set + ): + m[k] = val return m diff --git a/src/mistralai/client/models/agentscompletionstreamrequest.py b/src/mistralai/client/models/agentscompletionstreamrequest.py index 732e2402..c2cf3552 100644 --- a/src/mistralai/client/models/agentscompletionstreamrequest.py +++ b/src/mistralai/client/models/agentscompletionstreamrequest.py @@ -146,52 +146,44 @@ class AgentsCompletionStreamRequest(BaseModel): @model_serializer(mode="wrap") def serialize_model(self, handler): - optional_fields = [ - "max_tokens", - "stream", - "stop", - "random_seed", - "metadata", - "response_format", - "tools", - "tool_choice", - "presence_penalty", - "frequency_penalty", - "n", - "prediction", - "parallel_tool_calls", - "prompt_mode", - ] - nullable_fields = [ - "max_tokens", - "random_seed", - "metadata", - "tools", - "n", - "prompt_mode", - ] - null_default_fields = [] - + optional_fields = set( + [ + "max_tokens", + "stream", + "stop", + "random_seed", + "metadata", + "response_format", + "tools", + "tool_choice", + "presence_penalty", + "frequency_penalty", + "n", + "prediction", + "parallel_tool_calls", + "prompt_mode", + ] + ) + nullable_fields = set( + ["max_tokens", "random_seed", "metadata", "tools", "n", "prompt_mode"] + ) serialized = handler(self) - m = {} for n, f in type(self).model_fields.items(): k = f.alias or n val = 
serialized.get(k) - serialized.pop(k, None) - - optional_nullable = k in optional_fields and k in nullable_fields - is_set = ( - self.__pydantic_fields_set__.intersection({n}) - or k in null_default_fields - ) # pylint: disable=no-member - - if val is not None and val != UNSET_SENTINEL: - m[k] = val - elif val != UNSET_SENTINEL and ( - not k in optional_fields or (optional_nullable and is_set) - ): - m[k] = val + is_nullable_and_explicitly_set = ( + k in nullable_fields + and (self.__pydantic_fields_set__.intersection({n})) # pylint: disable=no-member + ) + + if val != UNSET_SENTINEL: + if ( + val is not None + or k not in optional_fields + or is_nullable_and_explicitly_set + ): + m[k] = val return m diff --git a/src/mistralai/client/models/archiveftmodelout.py b/src/mistralai/client/models/archiveftmodelout.py deleted file mode 100644 index 3107116c..00000000 --- a/src/mistralai/client/models/archiveftmodelout.py +++ /dev/null @@ -1,27 +0,0 @@ -"""Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" -# @generated-id: bab499599d30 - -from __future__ import annotations -from mistralai.client.types import BaseModel -from mistralai.client.utils import validate_const -import pydantic -from pydantic.functional_validators import AfterValidator -from typing import Literal, Optional -from typing_extensions import Annotated, NotRequired, TypedDict - - -class ArchiveFTModelOutTypedDict(TypedDict): - id: str - object: Literal["model"] - archived: NotRequired[bool] - - -class ArchiveFTModelOut(BaseModel): - id: str - - OBJECT: Annotated[ - Annotated[Optional[Literal["model"]], AfterValidator(validate_const("model"))], - pydantic.Field(alias="object"), - ] = "model" - - archived: Optional[bool] = True diff --git a/src/mistralai/client/models/archivemodelresponse.py b/src/mistralai/client/models/archivemodelresponse.py new file mode 100644 index 00000000..f1116850 --- /dev/null +++ b/src/mistralai/client/models/archivemodelresponse.py @@ -0,0 +1,50 @@ +"""Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" +# @generated-id: 2d22c644df64 + +from __future__ import annotations +from mistralai.client.types import BaseModel, UNSET_SENTINEL +from mistralai.client.utils import validate_const +import pydantic +from pydantic import model_serializer +from pydantic.functional_validators import AfterValidator +from typing import Literal, Optional +from typing_extensions import Annotated, NotRequired, TypedDict + + +class ArchiveModelResponseTypedDict(TypedDict): + id: str + object: Literal["model"] + archived: NotRequired[bool] + + +class ArchiveModelResponse(BaseModel): + id: str + + object: Annotated[ + Annotated[Optional[Literal["model"]], AfterValidator(validate_const("model"))], + pydantic.Field(alias="object"), + ] = "model" + + archived: Optional[bool] = True + + @model_serializer(mode="wrap") + def serialize_model(self, handler): + optional_fields = set(["object", "archived"]) + serialized = handler(self) + m = {} + + for n, f in type(self).model_fields.items(): + k = f.alias or n + val = serialized.get(k) + + if val != UNSET_SENTINEL: + if val is not None or k not in optional_fields: + m[k] = val + + return m + + +try: + ArchiveModelResponse.model_rebuild() +except NameError: + pass diff --git a/src/mistralai/client/models/assistantmessage.py b/src/mistralai/client/models/assistantmessage.py index 5a4a2085..26a778c7 100644 --- a/src/mistralai/client/models/assistantmessage.py +++ b/src/mistralai/client/models/assistantmessage.py @@ -11,9 +11,12 @@ UNSET, UNSET_SENTINEL, ) +from mistralai.client.utils import validate_const +import pydantic from pydantic import model_serializer +from pydantic.functional_validators import AfterValidator from typing import List, Literal, Optional, Union -from typing_extensions import NotRequired, TypeAliasType, TypedDict +from typing_extensions import Annotated, NotRequired, TypeAliasType, TypedDict AssistantMessageContentTypedDict = TypeAliasType( @@ -26,18 +29,22 @@ ) -AssistantMessageRole = Literal["assistant",] - - class 
AssistantMessageTypedDict(TypedDict): + role: Literal["assistant"] content: NotRequired[Nullable[AssistantMessageContentTypedDict]] tool_calls: NotRequired[Nullable[List[ToolCallTypedDict]]] prefix: NotRequired[bool] r"""Set this to `true` when adding an assistant message as prefix to condition the model response. The role of the prefix message is to force the model to start its answer by the content of the message.""" - role: NotRequired[AssistantMessageRole] class AssistantMessage(BaseModel): + role: Annotated[ + Annotated[ + Optional[Literal["assistant"]], AfterValidator(validate_const("assistant")) + ], + pydantic.Field(alias="role"), + ] = "assistant" + content: OptionalNullable[AssistantMessageContent] = UNSET tool_calls: OptionalNullable[List[ToolCall]] = UNSET @@ -45,34 +52,33 @@ class AssistantMessage(BaseModel): prefix: Optional[bool] = False r"""Set this to `true` when adding an assistant message as prefix to condition the model response. The role of the prefix message is to force the model to start its answer by the content of the message.""" - role: Optional[AssistantMessageRole] = "assistant" - @model_serializer(mode="wrap") def serialize_model(self, handler): - optional_fields = ["content", "tool_calls", "prefix", "role"] - nullable_fields = ["content", "tool_calls"] - null_default_fields = [] - + optional_fields = set(["role", "content", "tool_calls", "prefix"]) + nullable_fields = set(["content", "tool_calls"]) serialized = handler(self) - m = {} for n, f in type(self).model_fields.items(): k = f.alias or n val = serialized.get(k) - serialized.pop(k, None) + is_nullable_and_explicitly_set = ( + k in nullable_fields + and (self.__pydantic_fields_set__.intersection({n})) # pylint: disable=no-member + ) + + if val != UNSET_SENTINEL: + if ( + val is not None + or k not in optional_fields + or is_nullable_and_explicitly_set + ): + m[k] = val - optional_nullable = k in optional_fields and k in nullable_fields - is_set = ( - 
self.__pydantic_fields_set__.intersection({n}) - or k in null_default_fields - ) # pylint: disable=no-member + return m - if val is not None and val != UNSET_SENTINEL: - m[k] = val - elif val != UNSET_SENTINEL and ( - not k in optional_fields or (optional_nullable and is_set) - ): - m[k] = val - return m +try: + AssistantMessage.model_rebuild() +except NameError: + pass diff --git a/src/mistralai/client/models/audiochunk.py b/src/mistralai/client/models/audiochunk.py index a5186827..68866cd2 100644 --- a/src/mistralai/client/models/audiochunk.py +++ b/src/mistralai/client/models/audiochunk.py @@ -18,9 +18,15 @@ class AudioChunkTypedDict(TypedDict): class AudioChunk(BaseModel): input_audio: str - TYPE: Annotated[ + type: Annotated[ Annotated[ Literal["input_audio"], AfterValidator(validate_const("input_audio")) ], pydantic.Field(alias="type"), ] = "input_audio" + + +try: + AudioChunk.model_rebuild() +except NameError: + pass diff --git a/src/mistralai/client/models/audiotranscriptionrequest.py b/src/mistralai/client/models/audiotranscriptionrequest.py index 8c47a83c..fe4c79e3 100644 --- a/src/mistralai/client/models/audiotranscriptionrequest.py +++ b/src/mistralai/client/models/audiotranscriptionrequest.py @@ -58,7 +58,7 @@ class AudioTranscriptionRequest(BaseModel): UNSET ) - STREAM: Annotated[ + stream: Annotated[ Annotated[Optional[Literal[False]], AfterValidator(validate_const(False))], pydantic.Field(alias="stream"), FieldMetadata(multipart=True), @@ -75,40 +75,43 @@ class AudioTranscriptionRequest(BaseModel): @model_serializer(mode="wrap") def serialize_model(self, handler): - optional_fields = [ - "file", - "file_url", - "file_id", - "language", - "temperature", - "stream", - "diarize", - "context_bias", - "timestamp_granularities", - ] - nullable_fields = ["file_url", "file_id", "language", "temperature"] - null_default_fields = [] - + optional_fields = set( + [ + "file", + "file_url", + "file_id", + "language", + "temperature", + "stream", + "diarize", + 
"context_bias", + "timestamp_granularities", + ] + ) + nullable_fields = set(["file_url", "file_id", "language", "temperature"]) serialized = handler(self) - m = {} for n, f in type(self).model_fields.items(): k = f.alias or n val = serialized.get(k) - serialized.pop(k, None) + is_nullable_and_explicitly_set = ( + k in nullable_fields + and (self.__pydantic_fields_set__.intersection({n})) # pylint: disable=no-member + ) + + if val != UNSET_SENTINEL: + if ( + val is not None + or k not in optional_fields + or is_nullable_and_explicitly_set + ): + m[k] = val - optional_nullable = k in optional_fields and k in nullable_fields - is_set = ( - self.__pydantic_fields_set__.intersection({n}) - or k in null_default_fields - ) # pylint: disable=no-member + return m - if val is not None and val != UNSET_SENTINEL: - m[k] = val - elif val != UNSET_SENTINEL and ( - not k in optional_fields or (optional_nullable and is_set) - ): - m[k] = val - return m +try: + AudioTranscriptionRequest.model_rebuild() +except NameError: + pass diff --git a/src/mistralai/client/models/audiotranscriptionrequeststream.py b/src/mistralai/client/models/audiotranscriptionrequeststream.py index a080cee2..2d1e9269 100644 --- a/src/mistralai/client/models/audiotranscriptionrequeststream.py +++ b/src/mistralai/client/models/audiotranscriptionrequeststream.py @@ -56,7 +56,7 @@ class AudioTranscriptionRequestStream(BaseModel): UNSET ) - STREAM: Annotated[ + stream: Annotated[ Annotated[Optional[Literal[True]], AfterValidator(validate_const(True))], pydantic.Field(alias="stream"), FieldMetadata(multipart=True), @@ -73,40 +73,43 @@ class AudioTranscriptionRequestStream(BaseModel): @model_serializer(mode="wrap") def serialize_model(self, handler): - optional_fields = [ - "file", - "file_url", - "file_id", - "language", - "temperature", - "stream", - "diarize", - "context_bias", - "timestamp_granularities", - ] - nullable_fields = ["file_url", "file_id", "language", "temperature"] - null_default_fields = [] - + 
optional_fields = set( + [ + "file", + "file_url", + "file_id", + "language", + "temperature", + "stream", + "diarize", + "context_bias", + "timestamp_granularities", + ] + ) + nullable_fields = set(["file_url", "file_id", "language", "temperature"]) serialized = handler(self) - m = {} for n, f in type(self).model_fields.items(): k = f.alias or n val = serialized.get(k) - serialized.pop(k, None) + is_nullable_and_explicitly_set = ( + k in nullable_fields + and (self.__pydantic_fields_set__.intersection({n})) # pylint: disable=no-member + ) + + if val != UNSET_SENTINEL: + if ( + val is not None + or k not in optional_fields + or is_nullable_and_explicitly_set + ): + m[k] = val - optional_nullable = k in optional_fields and k in nullable_fields - is_set = ( - self.__pydantic_fields_set__.intersection({n}) - or k in null_default_fields - ) # pylint: disable=no-member + return m - if val is not None and val != UNSET_SENTINEL: - m[k] = val - elif val != UNSET_SENTINEL and ( - not k in optional_fields or (optional_nullable and is_set) - ): - m[k] = val - return m +try: + AudioTranscriptionRequestStream.model_rebuild() +except NameError: + pass diff --git a/src/mistralai/client/models/basemodelcard.py b/src/mistralai/client/models/basemodelcard.py index 17a3e5c9..9c9e9a20 100644 --- a/src/mistralai/client/models/basemodelcard.py +++ b/src/mistralai/client/models/basemodelcard.py @@ -60,54 +60,59 @@ class BaseModelCard(BaseModel): default_model_temperature: OptionalNullable[float] = UNSET - TYPE: Annotated[ + type: Annotated[ Annotated[Literal["base"], AfterValidator(validate_const("base"))], pydantic.Field(alias="type"), ] = "base" @model_serializer(mode="wrap") def serialize_model(self, handler): - optional_fields = [ - "object", - "created", - "owned_by", - "name", - "description", - "max_context_length", - "aliases", - "deprecation", - "deprecation_replacement_model", - "default_model_temperature", - ] - nullable_fields = [ - "name", - "description", - "deprecation", - 
"deprecation_replacement_model", - "default_model_temperature", - ] - null_default_fields = [] - + optional_fields = set( + [ + "object", + "created", + "owned_by", + "name", + "description", + "max_context_length", + "aliases", + "deprecation", + "deprecation_replacement_model", + "default_model_temperature", + ] + ) + nullable_fields = set( + [ + "name", + "description", + "deprecation", + "deprecation_replacement_model", + "default_model_temperature", + ] + ) serialized = handler(self) - m = {} for n, f in type(self).model_fields.items(): k = f.alias or n val = serialized.get(k) - serialized.pop(k, None) + is_nullable_and_explicitly_set = ( + k in nullable_fields + and (self.__pydantic_fields_set__.intersection({n})) # pylint: disable=no-member + ) + + if val != UNSET_SENTINEL: + if ( + val is not None + or k not in optional_fields + or is_nullable_and_explicitly_set + ): + m[k] = val - optional_nullable = k in optional_fields and k in nullable_fields - is_set = ( - self.__pydantic_fields_set__.intersection({n}) - or k in null_default_fields - ) # pylint: disable=no-member + return m - if val is not None and val != UNSET_SENTINEL: - m[k] = val - elif val != UNSET_SENTINEL and ( - not k in optional_fields or (optional_nullable and is_set) - ): - m[k] = val - return m +try: + BaseModelCard.model_rebuild() +except NameError: + pass diff --git a/src/mistralai/client/models/batcherror.py b/src/mistralai/client/models/batcherror.py index c1bf722a..8a353cd2 100644 --- a/src/mistralai/client/models/batcherror.py +++ b/src/mistralai/client/models/batcherror.py @@ -2,7 +2,8 @@ # @generated-id: 1563e2a576ec from __future__ import annotations -from mistralai.client.types import BaseModel +from mistralai.client.types import BaseModel, UNSET_SENTINEL +from pydantic import model_serializer from typing import Optional from typing_extensions import NotRequired, TypedDict @@ -16,3 +17,19 @@ class BatchError(BaseModel): message: str count: Optional[int] = 1 + + 
@model_serializer(mode="wrap") + def serialize_model(self, handler): + optional_fields = set(["count"]) + serialized = handler(self) + m = {} + + for n, f in type(self).model_fields.items(): + k = f.alias or n + val = serialized.get(k) + + if val != UNSET_SENTINEL: + if val is not None or k not in optional_fields: + m[k] = val + + return m diff --git a/src/mistralai/client/models/batchjobout.py b/src/mistralai/client/models/batchjob.py similarity index 64% rename from src/mistralai/client/models/batchjobout.py rename to src/mistralai/client/models/batchjob.py index 99c2b951..80acac33 100644 --- a/src/mistralai/client/models/batchjobout.py +++ b/src/mistralai/client/models/batchjob.py @@ -1,5 +1,5 @@ """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" -# @generated-id: cbf1d872a46e +# @generated-id: 85cd28932cc7 from __future__ import annotations from .batcherror import BatchError, BatchErrorTypedDict @@ -19,7 +19,7 @@ from typing_extensions import Annotated, NotRequired, TypedDict -class BatchJobOutTypedDict(TypedDict): +class BatchJobTypedDict(TypedDict): id: str input_files: List[str] endpoint: str @@ -41,7 +41,7 @@ class BatchJobOutTypedDict(TypedDict): completed_at: NotRequired[Nullable[int]] -class BatchJobOut(BaseModel): +class BatchJob(BaseModel): id: str input_files: List[str] @@ -62,7 +62,7 @@ class BatchJobOut(BaseModel): failed_requests: int - OBJECT: Annotated[ + object: Annotated[ Annotated[Optional[Literal["batch"]], AfterValidator(validate_const("batch"))], pydantic.Field(alias="object"), ] = "batch" @@ -85,49 +85,54 @@ class BatchJobOut(BaseModel): @model_serializer(mode="wrap") def serialize_model(self, handler): - optional_fields = [ - "object", - "metadata", - "model", - "agent_id", - "output_file", - "error_file", - "outputs", - "started_at", - "completed_at", - ] - nullable_fields = [ - "metadata", - "model", - "agent_id", - "output_file", - "error_file", - "outputs", - "started_at", - "completed_at", - ] - 
null_default_fields = [] - + optional_fields = set( + [ + "object", + "metadata", + "model", + "agent_id", + "output_file", + "error_file", + "outputs", + "started_at", + "completed_at", + ] + ) + nullable_fields = set( + [ + "metadata", + "model", + "agent_id", + "output_file", + "error_file", + "outputs", + "started_at", + "completed_at", + ] + ) serialized = handler(self) - m = {} for n, f in type(self).model_fields.items(): k = f.alias or n val = serialized.get(k) - serialized.pop(k, None) + is_nullable_and_explicitly_set = ( + k in nullable_fields + and (self.__pydantic_fields_set__.intersection({n})) # pylint: disable=no-member + ) + + if val != UNSET_SENTINEL: + if ( + val is not None + or k not in optional_fields + or is_nullable_and_explicitly_set + ): + m[k] = val - optional_nullable = k in optional_fields and k in nullable_fields - is_set = ( - self.__pydantic_fields_set__.intersection({n}) - or k in null_default_fields - ) # pylint: disable=no-member + return m - if val is not None and val != UNSET_SENTINEL: - m[k] = val - elif val != UNSET_SENTINEL and ( - not k in optional_fields or (optional_nullable and is_set) - ): - m[k] = val - return m +try: + BatchJob.model_rebuild() +except NameError: + pass diff --git a/src/mistralai/client/models/batchjobsout.py b/src/mistralai/client/models/batchjobsout.py deleted file mode 100644 index f65fc040..00000000 --- a/src/mistralai/client/models/batchjobsout.py +++ /dev/null @@ -1,28 +0,0 @@ -"""Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" -# @generated-id: 20b2516e7efa - -from __future__ import annotations -from .batchjobout import BatchJobOut, BatchJobOutTypedDict -from mistralai.client.types import BaseModel -from mistralai.client.utils import validate_const -import pydantic -from pydantic.functional_validators import AfterValidator -from typing import List, Literal, Optional -from typing_extensions import Annotated, NotRequired, TypedDict - - -class BatchJobsOutTypedDict(TypedDict): - total: int - data: NotRequired[List[BatchJobOutTypedDict]] - object: Literal["list"] - - -class BatchJobsOut(BaseModel): - total: int - - data: Optional[List[BatchJobOut]] = None - - OBJECT: Annotated[ - Annotated[Optional[Literal["list"]], AfterValidator(validate_const("list"))], - pydantic.Field(alias="object"), - ] = "list" diff --git a/src/mistralai/client/models/batchrequest.py b/src/mistralai/client/models/batchrequest.py index 41c45234..911a9a05 100644 --- a/src/mistralai/client/models/batchrequest.py +++ b/src/mistralai/client/models/batchrequest.py @@ -26,30 +26,25 @@ class BatchRequest(BaseModel): @model_serializer(mode="wrap") def serialize_model(self, handler): - optional_fields = ["custom_id"] - nullable_fields = ["custom_id"] - null_default_fields = [] - + optional_fields = set(["custom_id"]) + nullable_fields = set(["custom_id"]) serialized = handler(self) - m = {} for n, f in type(self).model_fields.items(): k = f.alias or n val = serialized.get(k) - serialized.pop(k, None) - - optional_nullable = k in optional_fields and k in nullable_fields - is_set = ( - self.__pydantic_fields_set__.intersection({n}) - or k in null_default_fields - ) # pylint: disable=no-member - - if val is not None and val != UNSET_SENTINEL: - m[k] = val - elif val != UNSET_SENTINEL and ( - not k in optional_fields or (optional_nullable and is_set) - ): - m[k] = val + is_nullable_and_explicitly_set = ( + k in nullable_fields + and (self.__pydantic_fields_set__.intersection({n})) # pylint: disable=no-member + ) + 
+ if val != UNSET_SENTINEL: + if ( + val is not None + or k not in optional_fields + or is_nullable_and_explicitly_set + ): + m[k] = val return m diff --git a/src/mistralai/client/models/cancelfinetuningjobop.py b/src/mistralai/client/models/cancelfinetuningjobop.py deleted file mode 100644 index ddd445bb..00000000 --- a/src/mistralai/client/models/cancelfinetuningjobop.py +++ /dev/null @@ -1,43 +0,0 @@ -"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" -# @generated-id: c9a1b39f0d02 - -from __future__ import annotations -from .classifierdetailedjobout import ( - ClassifierDetailedJobOut, - ClassifierDetailedJobOutTypedDict, -) -from .completiondetailedjobout import ( - CompletionDetailedJobOut, - CompletionDetailedJobOutTypedDict, -) -from mistralai.client.types import BaseModel -from mistralai.client.utils import FieldMetadata, PathParamMetadata -from pydantic import Field -from typing import Union -from typing_extensions import Annotated, TypeAliasType, TypedDict - - -class CancelFineTuningJobRequestTypedDict(TypedDict): - job_id: str - r"""The ID of the job to cancel.""" - - -class CancelFineTuningJobRequest(BaseModel): - job_id: Annotated[ - str, FieldMetadata(path=PathParamMetadata(style="simple", explode=False)) - ] - r"""The ID of the job to cancel.""" - - -CancelFineTuningJobResponseTypedDict = TypeAliasType( - "CancelFineTuningJobResponseTypedDict", - Union[CompletionDetailedJobOutTypedDict, ClassifierDetailedJobOutTypedDict], -) -r"""OK""" - - -CancelFineTuningJobResponse = Annotated[ - Union[ClassifierDetailedJobOut, CompletionDetailedJobOut], - Field(discriminator="JOB_TYPE"), -] -r"""OK""" diff --git a/src/mistralai/client/models/chatclassificationrequest.py b/src/mistralai/client/models/chatclassificationrequest.py index 8b6d07b9..cf2aa78a 100644 --- a/src/mistralai/client/models/chatclassificationrequest.py +++ b/src/mistralai/client/models/chatclassificationrequest.py @@ -4,18 +4,17 @@ from __future__ import annotations from 
.inputs import Inputs, InputsTypedDict from mistralai.client.types import BaseModel -import pydantic -from typing_extensions import Annotated, TypedDict +from typing_extensions import TypedDict class ChatClassificationRequestTypedDict(TypedDict): model: str - inputs: InputsTypedDict + input: InputsTypedDict r"""Chat to classify""" class ChatClassificationRequest(BaseModel): model: str - inputs: Annotated[Inputs, pydantic.Field(alias="input")] + input: Inputs r"""Chat to classify""" diff --git a/src/mistralai/client/models/chatcompletionrequest.py b/src/mistralai/client/models/chatcompletionrequest.py index 4f7d071b..e871bd92 100644 --- a/src/mistralai/client/models/chatcompletionrequest.py +++ b/src/mistralai/client/models/chatcompletionrequest.py @@ -171,56 +171,55 @@ class ChatCompletionRequest(BaseModel): @model_serializer(mode="wrap") def serialize_model(self, handler): - optional_fields = [ - "temperature", - "top_p", - "max_tokens", - "stream", - "stop", - "random_seed", - "metadata", - "response_format", - "tools", - "tool_choice", - "presence_penalty", - "frequency_penalty", - "n", - "prediction", - "parallel_tool_calls", - "prompt_mode", - "safe_prompt", - ] - nullable_fields = [ - "temperature", - "max_tokens", - "random_seed", - "metadata", - "tools", - "n", - "prompt_mode", - ] - null_default_fields = [] - + optional_fields = set( + [ + "temperature", + "top_p", + "max_tokens", + "stream", + "stop", + "random_seed", + "metadata", + "response_format", + "tools", + "tool_choice", + "presence_penalty", + "frequency_penalty", + "n", + "prediction", + "parallel_tool_calls", + "prompt_mode", + "safe_prompt", + ] + ) + nullable_fields = set( + [ + "temperature", + "max_tokens", + "random_seed", + "metadata", + "tools", + "n", + "prompt_mode", + ] + ) serialized = handler(self) - m = {} for n, f in type(self).model_fields.items(): k = f.alias or n val = serialized.get(k) - serialized.pop(k, None) - - optional_nullable = k in optional_fields and k in 
nullable_fields - is_set = ( - self.__pydantic_fields_set__.intersection({n}) - or k in null_default_fields - ) # pylint: disable=no-member - - if val is not None and val != UNSET_SENTINEL: - m[k] = val - elif val != UNSET_SENTINEL and ( - not k in optional_fields or (optional_nullable and is_set) - ): - m[k] = val + is_nullable_and_explicitly_set = ( + k in nullable_fields + and (self.__pydantic_fields_set__.intersection({n})) # pylint: disable=no-member + ) + + if val != UNSET_SENTINEL: + if ( + val is not None + or k not in optional_fields + or is_nullable_and_explicitly_set + ): + m[k] = val return m diff --git a/src/mistralai/client/models/chatcompletionstreamrequest.py b/src/mistralai/client/models/chatcompletionstreamrequest.py index ec7d2ae1..b7b2bff1 100644 --- a/src/mistralai/client/models/chatcompletionstreamrequest.py +++ b/src/mistralai/client/models/chatcompletionstreamrequest.py @@ -169,56 +169,55 @@ class ChatCompletionStreamRequest(BaseModel): @model_serializer(mode="wrap") def serialize_model(self, handler): - optional_fields = [ - "temperature", - "top_p", - "max_tokens", - "stream", - "stop", - "random_seed", - "metadata", - "response_format", - "tools", - "tool_choice", - "presence_penalty", - "frequency_penalty", - "n", - "prediction", - "parallel_tool_calls", - "prompt_mode", - "safe_prompt", - ] - nullable_fields = [ - "temperature", - "max_tokens", - "random_seed", - "metadata", - "tools", - "n", - "prompt_mode", - ] - null_default_fields = [] - + optional_fields = set( + [ + "temperature", + "top_p", + "max_tokens", + "stream", + "stop", + "random_seed", + "metadata", + "response_format", + "tools", + "tool_choice", + "presence_penalty", + "frequency_penalty", + "n", + "prediction", + "parallel_tool_calls", + "prompt_mode", + "safe_prompt", + ] + ) + nullable_fields = set( + [ + "temperature", + "max_tokens", + "random_seed", + "metadata", + "tools", + "n", + "prompt_mode", + ] + ) serialized = handler(self) - m = {} for n, f in 
type(self).model_fields.items(): k = f.alias or n val = serialized.get(k) - serialized.pop(k, None) - - optional_nullable = k in optional_fields and k in nullable_fields - is_set = ( - self.__pydantic_fields_set__.intersection({n}) - or k in null_default_fields - ) # pylint: disable=no-member - - if val is not None and val != UNSET_SENTINEL: - m[k] = val - elif val != UNSET_SENTINEL and ( - not k in optional_fields or (optional_nullable and is_set) - ): - m[k] = val + is_nullable_and_explicitly_set = ( + k in nullable_fields + and (self.__pydantic_fields_set__.intersection({n})) # pylint: disable=no-member + ) + + if val != UNSET_SENTINEL: + if ( + val is not None + or k not in optional_fields + or is_nullable_and_explicitly_set + ): + m[k] = val return m diff --git a/src/mistralai/client/models/chatmoderationrequest.py b/src/mistralai/client/models/chatmoderationrequest.py index a8d021e8..228e7d26 100644 --- a/src/mistralai/client/models/chatmoderationrequest.py +++ b/src/mistralai/client/models/chatmoderationrequest.py @@ -86,3 +86,9 @@ class ChatModerationRequest(BaseModel): r"""Chat to classify""" model: str + + +try: + ChatModerationRequest.model_rebuild() +except NameError: + pass diff --git a/src/mistralai/client/models/checkpointout.py b/src/mistralai/client/models/checkpoint.py similarity index 81% rename from src/mistralai/client/models/checkpointout.py rename to src/mistralai/client/models/checkpoint.py index 3e8d90e9..c24e433e 100644 --- a/src/mistralai/client/models/checkpointout.py +++ b/src/mistralai/client/models/checkpoint.py @@ -1,14 +1,14 @@ """Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" -# @generated-id: 3866fe32cd7c +# @generated-id: 1a530d3674d8 from __future__ import annotations -from .metricout import MetricOut, MetricOutTypedDict +from .metric import Metric, MetricTypedDict from mistralai.client.types import BaseModel from typing_extensions import TypedDict -class CheckpointOutTypedDict(TypedDict): - metrics: MetricOutTypedDict +class CheckpointTypedDict(TypedDict): + metrics: MetricTypedDict r"""Metrics at the step number during the fine-tuning job. Use these metrics to assess if the training is going smoothly (loss should decrease, token accuracy should increase).""" step_number: int r"""The step number that the checkpoint was created at.""" @@ -16,8 +16,8 @@ class CheckpointOutTypedDict(TypedDict): r"""The UNIX timestamp (in seconds) for when the checkpoint was created.""" -class CheckpointOut(BaseModel): - metrics: MetricOut +class Checkpoint(BaseModel): + metrics: Metric r"""Metrics at the step number during the fine-tuning job. Use these metrics to assess if the training is going smoothly (loss should decrease, token accuracy should increase).""" step_number: int diff --git a/src/mistralai/client/models/classificationrequest.py b/src/mistralai/client/models/classificationrequest.py index 903706c3..25b69413 100644 --- a/src/mistralai/client/models/classificationrequest.py +++ b/src/mistralai/client/models/classificationrequest.py @@ -46,30 +46,31 @@ class ClassificationRequest(BaseModel): @model_serializer(mode="wrap") def serialize_model(self, handler): - optional_fields = ["metadata"] - nullable_fields = ["metadata"] - null_default_fields = [] - + optional_fields = set(["metadata"]) + nullable_fields = set(["metadata"]) serialized = handler(self) - m = {} for n, f in type(self).model_fields.items(): k = f.alias or n val = serialized.get(k) - serialized.pop(k, None) + is_nullable_and_explicitly_set = ( + k in nullable_fields + and (self.__pydantic_fields_set__.intersection({n})) # pylint: disable=no-member + ) + + if val 
!= UNSET_SENTINEL: + if ( + val is not None + or k not in optional_fields + or is_nullable_and_explicitly_set + ): + m[k] = val - optional_nullable = k in optional_fields and k in nullable_fields - is_set = ( - self.__pydantic_fields_set__.intersection({n}) - or k in null_default_fields - ) # pylint: disable=no-member + return m - if val is not None and val != UNSET_SENTINEL: - m[k] = val - elif val != UNSET_SENTINEL and ( - not k in optional_fields or (optional_nullable and is_set) - ): - m[k] = val - return m +try: + ClassificationRequest.model_rebuild() +except NameError: + pass diff --git a/src/mistralai/client/models/classifierdetailedjobout.py b/src/mistralai/client/models/classifierdetailedjobout.py deleted file mode 100644 index bc5c5381..00000000 --- a/src/mistralai/client/models/classifierdetailedjobout.py +++ /dev/null @@ -1,169 +0,0 @@ -"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" -# @generated-id: d8daeb39ef9f - -from __future__ import annotations -from .checkpointout import CheckpointOut, CheckpointOutTypedDict -from .classifiertargetout import ClassifierTargetOut, ClassifierTargetOutTypedDict -from .classifiertrainingparameters import ( - ClassifierTrainingParameters, - ClassifierTrainingParametersTypedDict, -) -from .eventout import EventOut, EventOutTypedDict -from .jobmetadataout import JobMetadataOut, JobMetadataOutTypedDict -from .wandbintegrationout import WandbIntegrationOut, WandbIntegrationOutTypedDict -from mistralai.client.types import ( - BaseModel, - Nullable, - OptionalNullable, - UNSET, - UNSET_SENTINEL, - UnrecognizedStr, -) -from mistralai.client.utils import validate_const -import pydantic -from pydantic import model_serializer -from pydantic.functional_validators import AfterValidator -from typing import List, Literal, Optional, Union -from typing_extensions import Annotated, NotRequired, TypedDict - - -ClassifierDetailedJobOutStatus = Union[ - Literal[ - "QUEUED", - "STARTED", - "VALIDATING", - 
"VALIDATED", - "RUNNING", - "FAILED_VALIDATION", - "FAILED", - "SUCCESS", - "CANCELLED", - "CANCELLATION_REQUESTED", - ], - UnrecognizedStr, -] - - -ClassifierDetailedJobOutIntegrationTypedDict = WandbIntegrationOutTypedDict - - -ClassifierDetailedJobOutIntegration = WandbIntegrationOut - - -class ClassifierDetailedJobOutTypedDict(TypedDict): - id: str - auto_start: bool - model: str - status: ClassifierDetailedJobOutStatus - created_at: int - modified_at: int - training_files: List[str] - hyperparameters: ClassifierTrainingParametersTypedDict - classifier_targets: List[ClassifierTargetOutTypedDict] - validation_files: NotRequired[Nullable[List[str]]] - object: Literal["job"] - fine_tuned_model: NotRequired[Nullable[str]] - suffix: NotRequired[Nullable[str]] - integrations: NotRequired[ - Nullable[List[ClassifierDetailedJobOutIntegrationTypedDict]] - ] - trained_tokens: NotRequired[Nullable[int]] - metadata: NotRequired[Nullable[JobMetadataOutTypedDict]] - job_type: Literal["classifier"] - events: NotRequired[List[EventOutTypedDict]] - r"""Event items are created every time the status of a fine-tuning job changes. 
The timestamped list of all events is accessible here.""" - checkpoints: NotRequired[List[CheckpointOutTypedDict]] - - -class ClassifierDetailedJobOut(BaseModel): - id: str - - auto_start: bool - - model: str - - status: ClassifierDetailedJobOutStatus - - created_at: int - - modified_at: int - - training_files: List[str] - - hyperparameters: ClassifierTrainingParameters - - classifier_targets: List[ClassifierTargetOut] - - validation_files: OptionalNullable[List[str]] = UNSET - - OBJECT: Annotated[ - Annotated[Optional[Literal["job"]], AfterValidator(validate_const("job"))], - pydantic.Field(alias="object"), - ] = "job" - - fine_tuned_model: OptionalNullable[str] = UNSET - - suffix: OptionalNullable[str] = UNSET - - integrations: OptionalNullable[List[ClassifierDetailedJobOutIntegration]] = UNSET - - trained_tokens: OptionalNullable[int] = UNSET - - metadata: OptionalNullable[JobMetadataOut] = UNSET - - JOB_TYPE: Annotated[ - Annotated[Literal["classifier"], AfterValidator(validate_const("classifier"))], - pydantic.Field(alias="job_type"), - ] = "classifier" - - events: Optional[List[EventOut]] = None - r"""Event items are created every time the status of a fine-tuning job changes. 
The timestamped list of all events is accessible here.""" - - checkpoints: Optional[List[CheckpointOut]] = None - - @model_serializer(mode="wrap") - def serialize_model(self, handler): - optional_fields = [ - "validation_files", - "object", - "fine_tuned_model", - "suffix", - "integrations", - "trained_tokens", - "metadata", - "events", - "checkpoints", - ] - nullable_fields = [ - "validation_files", - "fine_tuned_model", - "suffix", - "integrations", - "trained_tokens", - "metadata", - ] - null_default_fields = [] - - serialized = handler(self) - - m = {} - - for n, f in type(self).model_fields.items(): - k = f.alias or n - val = serialized.get(k) - serialized.pop(k, None) - - optional_nullable = k in optional_fields and k in nullable_fields - is_set = ( - self.__pydantic_fields_set__.intersection({n}) - or k in null_default_fields - ) # pylint: disable=no-member - - if val is not None and val != UNSET_SENTINEL: - m[k] = val - elif val != UNSET_SENTINEL and ( - not k in optional_fields or (optional_nullable and is_set) - ): - m[k] = val - - return m diff --git a/src/mistralai/client/models/classifierftmodelout.py b/src/mistralai/client/models/classifierfinetunedmodel.py similarity index 56% rename from src/mistralai/client/models/classifierftmodelout.py rename to src/mistralai/client/models/classifierfinetunedmodel.py index 182f4954..fbcf5892 100644 --- a/src/mistralai/client/models/classifierftmodelout.py +++ b/src/mistralai/client/models/classifierfinetunedmodel.py @@ -1,11 +1,14 @@ """Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" -# @generated-id: 2903a7123b06 +# @generated-id: 5a9a7a0153c8 from __future__ import annotations -from .classifiertargetout import ClassifierTargetOut, ClassifierTargetOutTypedDict -from .ftmodelcapabilitiesout import ( - FTModelCapabilitiesOut, - FTModelCapabilitiesOutTypedDict, +from .classifiertargetresult import ( + ClassifierTargetResult, + ClassifierTargetResultTypedDict, +) +from .finetunedmodelcapabilities import ( + FineTunedModelCapabilities, + FineTunedModelCapabilitiesTypedDict, ) from mistralai.client.types import ( BaseModel, @@ -22,7 +25,7 @@ from typing_extensions import Annotated, NotRequired, TypedDict -class ClassifierFTModelOutTypedDict(TypedDict): +class ClassifierFineTunedModelTypedDict(TypedDict): id: str created: int owned_by: str @@ -30,9 +33,9 @@ class ClassifierFTModelOutTypedDict(TypedDict): root: str root_version: str archived: bool - capabilities: FTModelCapabilitiesOutTypedDict + capabilities: FineTunedModelCapabilitiesTypedDict job: str - classifier_targets: List[ClassifierTargetOutTypedDict] + classifier_targets: List[ClassifierTargetResultTypedDict] object: Literal["model"] name: NotRequired[Nullable[str]] description: NotRequired[Nullable[str]] @@ -41,7 +44,7 @@ class ClassifierFTModelOutTypedDict(TypedDict): model_type: Literal["classifier"] -class ClassifierFTModelOut(BaseModel): +class ClassifierFineTunedModel(BaseModel): id: str created: int @@ -56,13 +59,13 @@ class ClassifierFTModelOut(BaseModel): archived: bool - capabilities: FTModelCapabilitiesOut + capabilities: FineTunedModelCapabilities job: str - classifier_targets: List[ClassifierTargetOut] + classifier_targets: List[ClassifierTargetResult] - OBJECT: Annotated[ + object: Annotated[ Annotated[Optional[Literal["model"]], AfterValidator(validate_const("model"))], pydantic.Field(alias="object"), ] = "model" @@ -75,43 +78,40 @@ class ClassifierFTModelOut(BaseModel): aliases: Optional[List[str]] = None - MODEL_TYPE: Annotated[ + model_type: Annotated[ 
Annotated[Literal["classifier"], AfterValidator(validate_const("classifier"))], pydantic.Field(alias="model_type"), ] = "classifier" @model_serializer(mode="wrap") def serialize_model(self, handler): - optional_fields = [ - "object", - "name", - "description", - "max_context_length", - "aliases", - ] - nullable_fields = ["name", "description"] - null_default_fields = [] - + optional_fields = set( + ["object", "name", "description", "max_context_length", "aliases"] + ) + nullable_fields = set(["name", "description"]) serialized = handler(self) - m = {} for n, f in type(self).model_fields.items(): k = f.alias or n val = serialized.get(k) - serialized.pop(k, None) + is_nullable_and_explicitly_set = ( + k in nullable_fields + and (self.__pydantic_fields_set__.intersection({n})) # pylint: disable=no-member + ) + + if val != UNSET_SENTINEL: + if ( + val is not None + or k not in optional_fields + or is_nullable_and_explicitly_set + ): + m[k] = val - optional_nullable = k in optional_fields and k in nullable_fields - is_set = ( - self.__pydantic_fields_set__.intersection({n}) - or k in null_default_fields - ) # pylint: disable=no-member + return m - if val is not None and val != UNSET_SENTINEL: - m[k] = val - elif val != UNSET_SENTINEL and ( - not k in optional_fields or (optional_nullable and is_set) - ): - m[k] = val - return m +try: + ClassifierFineTunedModel.model_rebuild() +except NameError: + pass diff --git a/src/mistralai/client/models/classifierjobout.py b/src/mistralai/client/models/classifierfinetuningjob.py similarity index 63% rename from src/mistralai/client/models/classifierjobout.py rename to src/mistralai/client/models/classifierfinetuningjob.py index 03a5b11c..fb160cf8 100644 --- a/src/mistralai/client/models/classifierjobout.py +++ b/src/mistralai/client/models/classifierfinetuningjob.py @@ -1,13 +1,16 @@ """Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" -# @generated-id: e19e9c4416cc +# @generated-id: a244d5f2afc5 from __future__ import annotations from .classifiertrainingparameters import ( ClassifierTrainingParameters, ClassifierTrainingParametersTypedDict, ) -from .jobmetadataout import JobMetadataOut, JobMetadataOutTypedDict -from .wandbintegrationout import WandbIntegrationOut, WandbIntegrationOutTypedDict +from .jobmetadata import JobMetadata, JobMetadataTypedDict +from .wandbintegrationresult import ( + WandbIntegrationResult, + WandbIntegrationResultTypedDict, +) from mistralai.client.types import ( BaseModel, Nullable, @@ -18,13 +21,13 @@ ) from mistralai.client.utils import validate_const import pydantic -from pydantic import model_serializer +from pydantic import ConfigDict, model_serializer from pydantic.functional_validators import AfterValidator -from typing import List, Literal, Optional, Union +from typing import Any, List, Literal, Optional, Union from typing_extensions import Annotated, NotRequired, TypedDict -ClassifierJobOutStatus = Union[ +ClassifierFineTuningJobStatus = Union[ Literal[ "QUEUED", "STARTED", @@ -42,18 +45,33 @@ r"""The current status of the fine-tuning job.""" -ClassifierJobOutIntegrationTypedDict = WandbIntegrationOutTypedDict +ClassifierFineTuningJobIntegrationTypedDict = WandbIntegrationResultTypedDict + + +class UnknownClassifierFineTuningJobIntegration(BaseModel): + r"""A ClassifierFineTuningJobIntegration variant the SDK doesn't recognize. 
Preserves the raw payload.""" + + type: Literal["UNKNOWN"] = "UNKNOWN" + raw: Any + is_unknown: Literal[True] = True + + model_config = ConfigDict(frozen=True) + +_CLASSIFIER_FINE_TUNING_JOB_INTEGRATION_VARIANTS: dict[str, Any] = { + "wandb": WandbIntegrationResult, +} -ClassifierJobOutIntegration = WandbIntegrationOut +ClassifierFineTuningJobIntegration = WandbIntegrationResult -class ClassifierJobOutTypedDict(TypedDict): + +class ClassifierFineTuningJobTypedDict(TypedDict): id: str r"""The ID of the job.""" auto_start: bool model: str - status: ClassifierJobOutStatus + status: ClassifierFineTuningJobStatus r"""The current status of the fine-tuning job.""" created_at: int r"""The UNIX timestamp (in seconds) for when the fine-tuning job was created.""" @@ -70,16 +88,18 @@ class ClassifierJobOutTypedDict(TypedDict): r"""The name of the fine-tuned model that is being created. The value will be `null` if the fine-tuning job is still running.""" suffix: NotRequired[Nullable[str]] r"""Optional text/code that adds more context for the model. When given a `prompt` and a `suffix` the model will fill what is between them. 
When `suffix` is not provided, the model will simply execute completion starting with `prompt`.""" - integrations: NotRequired[Nullable[List[ClassifierJobOutIntegrationTypedDict]]] + integrations: NotRequired[ + Nullable[List[ClassifierFineTuningJobIntegrationTypedDict]] + ] r"""A list of integrations enabled for your fine-tuning job.""" trained_tokens: NotRequired[Nullable[int]] r"""Total number of tokens trained.""" - metadata: NotRequired[Nullable[JobMetadataOutTypedDict]] + metadata: NotRequired[Nullable[JobMetadataTypedDict]] job_type: Literal["classifier"] r"""The type of job (`FT` for fine-tuning).""" -class ClassifierJobOut(BaseModel): +class ClassifierFineTuningJob(BaseModel): id: str r"""The ID of the job.""" @@ -87,7 +107,7 @@ class ClassifierJobOut(BaseModel): model: str - status: ClassifierJobOutStatus + status: ClassifierFineTuningJobStatus r"""The current status of the fine-tuning job.""" created_at: int @@ -104,7 +124,7 @@ class ClassifierJobOut(BaseModel): validation_files: OptionalNullable[List[str]] = UNSET r"""A list containing the IDs of uploaded files that contain validation data.""" - OBJECT: Annotated[ + object: Annotated[ Annotated[Optional[Literal["job"]], AfterValidator(validate_const("job"))], pydantic.Field(alias="object"), ] = "job" @@ -116,15 +136,15 @@ class ClassifierJobOut(BaseModel): suffix: OptionalNullable[str] = UNSET r"""Optional text/code that adds more context for the model. When given a `prompt` and a `suffix` the model will fill what is between them. 
When `suffix` is not provided, the model will simply execute completion starting with `prompt`.""" - integrations: OptionalNullable[List[ClassifierJobOutIntegration]] = UNSET + integrations: OptionalNullable[List[ClassifierFineTuningJobIntegration]] = UNSET r"""A list of integrations enabled for your fine-tuning job.""" trained_tokens: OptionalNullable[int] = UNSET r"""Total number of tokens trained.""" - metadata: OptionalNullable[JobMetadataOut] = UNSET + metadata: OptionalNullable[JobMetadata] = UNSET - JOB_TYPE: Annotated[ + job_type: Annotated[ Annotated[Literal["classifier"], AfterValidator(validate_const("classifier"))], pydantic.Field(alias="job_type"), ] = "classifier" @@ -132,45 +152,50 @@ class ClassifierJobOut(BaseModel): @model_serializer(mode="wrap") def serialize_model(self, handler): - optional_fields = [ - "validation_files", - "object", - "fine_tuned_model", - "suffix", - "integrations", - "trained_tokens", - "metadata", - ] - nullable_fields = [ - "validation_files", - "fine_tuned_model", - "suffix", - "integrations", - "trained_tokens", - "metadata", - ] - null_default_fields = [] - + optional_fields = set( + [ + "validation_files", + "object", + "fine_tuned_model", + "suffix", + "integrations", + "trained_tokens", + "metadata", + ] + ) + nullable_fields = set( + [ + "validation_files", + "fine_tuned_model", + "suffix", + "integrations", + "trained_tokens", + "metadata", + ] + ) serialized = handler(self) - m = {} for n, f in type(self).model_fields.items(): k = f.alias or n val = serialized.get(k) - serialized.pop(k, None) + is_nullable_and_explicitly_set = ( + k in nullable_fields + and (self.__pydantic_fields_set__.intersection({n})) # pylint: disable=no-member + ) + + if val != UNSET_SENTINEL: + if ( + val is not None + or k not in optional_fields + or is_nullable_and_explicitly_set + ): + m[k] = val - optional_nullable = k in optional_fields and k in nullable_fields - is_set = ( - self.__pydantic_fields_set__.intersection({n}) - or k in 
null_default_fields - ) # pylint: disable=no-member + return m - if val is not None and val != UNSET_SENTINEL: - m[k] = val - elif val != UNSET_SENTINEL and ( - not k in optional_fields or (optional_nullable and is_set) - ): - m[k] = val - return m +try: + ClassifierFineTuningJob.model_rebuild() +except NameError: + pass diff --git a/src/mistralai/client/models/classifierfinetuningjobdetails.py b/src/mistralai/client/models/classifierfinetuningjobdetails.py new file mode 100644 index 00000000..5d73f55e --- /dev/null +++ b/src/mistralai/client/models/classifierfinetuningjobdetails.py @@ -0,0 +1,197 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" +# @generated-id: 75c5dee8df2e + +from __future__ import annotations +from .checkpoint import Checkpoint, CheckpointTypedDict +from .classifiertargetresult import ( + ClassifierTargetResult, + ClassifierTargetResultTypedDict, +) +from .classifiertrainingparameters import ( + ClassifierTrainingParameters, + ClassifierTrainingParametersTypedDict, +) +from .event import Event, EventTypedDict +from .jobmetadata import JobMetadata, JobMetadataTypedDict +from .wandbintegrationresult import ( + WandbIntegrationResult, + WandbIntegrationResultTypedDict, +) +from mistralai.client.types import ( + BaseModel, + Nullable, + OptionalNullable, + UNSET, + UNSET_SENTINEL, + UnrecognizedStr, +) +from mistralai.client.utils import validate_const +import pydantic +from pydantic import ConfigDict, model_serializer +from pydantic.functional_validators import AfterValidator +from typing import Any, List, Literal, Optional, Union +from typing_extensions import Annotated, NotRequired, TypedDict + + +ClassifierFineTuningJobDetailsStatus = Union[ + Literal[ + "QUEUED", + "STARTED", + "VALIDATING", + "VALIDATED", + "RUNNING", + "FAILED_VALIDATION", + "FAILED", + "SUCCESS", + "CANCELLED", + "CANCELLATION_REQUESTED", + ], + UnrecognizedStr, +] + + +ClassifierFineTuningJobDetailsIntegrationTypedDict = 
WandbIntegrationResultTypedDict + + +class UnknownClassifierFineTuningJobDetailsIntegration(BaseModel): + r"""A ClassifierFineTuningJobDetailsIntegration variant the SDK doesn't recognize. Preserves the raw payload.""" + + type: Literal["UNKNOWN"] = "UNKNOWN" + raw: Any + is_unknown: Literal[True] = True + + model_config = ConfigDict(frozen=True) + + +_CLASSIFIER_FINE_TUNING_JOB_DETAILS_INTEGRATION_VARIANTS: dict[str, Any] = { + "wandb": WandbIntegrationResult, +} + + +ClassifierFineTuningJobDetailsIntegration = WandbIntegrationResult + + +class ClassifierFineTuningJobDetailsTypedDict(TypedDict): + id: str + auto_start: bool + model: str + status: ClassifierFineTuningJobDetailsStatus + created_at: int + modified_at: int + training_files: List[str] + hyperparameters: ClassifierTrainingParametersTypedDict + classifier_targets: List[ClassifierTargetResultTypedDict] + validation_files: NotRequired[Nullable[List[str]]] + object: Literal["job"] + fine_tuned_model: NotRequired[Nullable[str]] + suffix: NotRequired[Nullable[str]] + integrations: NotRequired[ + Nullable[List[ClassifierFineTuningJobDetailsIntegrationTypedDict]] + ] + trained_tokens: NotRequired[Nullable[int]] + metadata: NotRequired[Nullable[JobMetadataTypedDict]] + job_type: Literal["classifier"] + events: NotRequired[List[EventTypedDict]] + r"""Event items are created every time the status of a fine-tuning job changes. 
The timestamped list of all events is accessible here.""" + checkpoints: NotRequired[List[CheckpointTypedDict]] + + +class ClassifierFineTuningJobDetails(BaseModel): + id: str + + auto_start: bool + + model: str + + status: ClassifierFineTuningJobDetailsStatus + + created_at: int + + modified_at: int + + training_files: List[str] + + hyperparameters: ClassifierTrainingParameters + + classifier_targets: List[ClassifierTargetResult] + + validation_files: OptionalNullable[List[str]] = UNSET + + object: Annotated[ + Annotated[Optional[Literal["job"]], AfterValidator(validate_const("job"))], + pydantic.Field(alias="object"), + ] = "job" + + fine_tuned_model: OptionalNullable[str] = UNSET + + suffix: OptionalNullable[str] = UNSET + + integrations: OptionalNullable[List[ClassifierFineTuningJobDetailsIntegration]] = ( + UNSET + ) + + trained_tokens: OptionalNullable[int] = UNSET + + metadata: OptionalNullable[JobMetadata] = UNSET + + job_type: Annotated[ + Annotated[Literal["classifier"], AfterValidator(validate_const("classifier"))], + pydantic.Field(alias="job_type"), + ] = "classifier" + + events: Optional[List[Event]] = None + r"""Event items are created every time the status of a fine-tuning job changes. 
The timestamped list of all events is accessible here.""" + + checkpoints: Optional[List[Checkpoint]] = None + + @model_serializer(mode="wrap") + def serialize_model(self, handler): + optional_fields = set( + [ + "validation_files", + "object", + "fine_tuned_model", + "suffix", + "integrations", + "trained_tokens", + "metadata", + "events", + "checkpoints", + ] + ) + nullable_fields = set( + [ + "validation_files", + "fine_tuned_model", + "suffix", + "integrations", + "trained_tokens", + "metadata", + ] + ) + serialized = handler(self) + m = {} + + for n, f in type(self).model_fields.items(): + k = f.alias or n + val = serialized.get(k) + is_nullable_and_explicitly_set = ( + k in nullable_fields + and (self.__pydantic_fields_set__.intersection({n})) # pylint: disable=no-member + ) + + if val != UNSET_SENTINEL: + if ( + val is not None + or k not in optional_fields + or is_nullable_and_explicitly_set + ): + m[k] = val + + return m + + +try: + ClassifierFineTuningJobDetails.model_rebuild() +except NameError: + pass diff --git a/src/mistralai/client/models/classifiertargetin.py b/src/mistralai/client/models/classifiertarget.py similarity index 55% rename from src/mistralai/client/models/classifiertargetin.py rename to src/mistralai/client/models/classifiertarget.py index b250109b..4d66d789 100644 --- a/src/mistralai/client/models/classifiertargetin.py +++ b/src/mistralai/client/models/classifiertarget.py @@ -1,5 +1,5 @@ """Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" -# @generated-id: ed021de1c06c +# @generated-id: 2177d51d9dcf from __future__ import annotations from .ftclassifierlossfunction import FTClassifierLossFunction @@ -15,14 +15,14 @@ from typing_extensions import NotRequired, TypedDict -class ClassifierTargetInTypedDict(TypedDict): +class ClassifierTargetTypedDict(TypedDict): name: str labels: List[str] weight: NotRequired[float] loss_function: NotRequired[Nullable[FTClassifierLossFunction]] -class ClassifierTargetIn(BaseModel): +class ClassifierTarget(BaseModel): name: str labels: List[str] @@ -33,30 +33,25 @@ class ClassifierTargetIn(BaseModel): @model_serializer(mode="wrap") def serialize_model(self, handler): - optional_fields = ["weight", "loss_function"] - nullable_fields = ["loss_function"] - null_default_fields = [] - + optional_fields = set(["weight", "loss_function"]) + nullable_fields = set(["loss_function"]) serialized = handler(self) - m = {} for n, f in type(self).model_fields.items(): k = f.alias or n val = serialized.get(k) - serialized.pop(k, None) - - optional_nullable = k in optional_fields and k in nullable_fields - is_set = ( - self.__pydantic_fields_set__.intersection({n}) - or k in null_default_fields - ) # pylint: disable=no-member - - if val is not None and val != UNSET_SENTINEL: - m[k] = val - elif val != UNSET_SENTINEL and ( - not k in optional_fields or (optional_nullable and is_set) - ): - m[k] = val + is_nullable_and_explicitly_set = ( + k in nullable_fields + and (self.__pydantic_fields_set__.intersection({n})) # pylint: disable=no-member + ) + + if val != UNSET_SENTINEL: + if ( + val is not None + or k not in optional_fields + or is_nullable_and_explicitly_set + ): + m[k] = val return m diff --git a/src/mistralai/client/models/classifiertargetout.py b/src/mistralai/client/models/classifiertargetresult.py similarity index 79% rename from src/mistralai/client/models/classifiertargetout.py rename to src/mistralai/client/models/classifiertargetresult.py index 
3d41a4d9..8ce7c0ca 100644 --- a/src/mistralai/client/models/classifiertargetout.py +++ b/src/mistralai/client/models/classifiertargetresult.py @@ -1,5 +1,5 @@ """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" -# @generated-id: 5131f55abefe +# @generated-id: 19c343844888 from __future__ import annotations from .ftclassifierlossfunction import FTClassifierLossFunction @@ -8,14 +8,14 @@ from typing_extensions import TypedDict -class ClassifierTargetOutTypedDict(TypedDict): +class ClassifierTargetResultTypedDict(TypedDict): name: str labels: List[str] weight: float loss_function: FTClassifierLossFunction -class ClassifierTargetOut(BaseModel): +class ClassifierTargetResult(BaseModel): name: str labels: List[str] diff --git a/src/mistralai/client/models/classifiertrainingparameters.py b/src/mistralai/client/models/classifiertrainingparameters.py index f360eda5..14fa4926 100644 --- a/src/mistralai/client/models/classifiertrainingparameters.py +++ b/src/mistralai/client/models/classifiertrainingparameters.py @@ -38,43 +38,36 @@ class ClassifierTrainingParameters(BaseModel): @model_serializer(mode="wrap") def serialize_model(self, handler): - optional_fields = [ - "training_steps", - "learning_rate", - "weight_decay", - "warmup_fraction", - "epochs", - "seq_len", - ] - nullable_fields = [ - "training_steps", - "weight_decay", - "warmup_fraction", - "epochs", - "seq_len", - ] - null_default_fields = [] - + optional_fields = set( + [ + "training_steps", + "learning_rate", + "weight_decay", + "warmup_fraction", + "epochs", + "seq_len", + ] + ) + nullable_fields = set( + ["training_steps", "weight_decay", "warmup_fraction", "epochs", "seq_len"] + ) serialized = handler(self) - m = {} for n, f in type(self).model_fields.items(): k = f.alias or n val = serialized.get(k) - serialized.pop(k, None) - - optional_nullable = k in optional_fields and k in nullable_fields - is_set = ( - self.__pydantic_fields_set__.intersection({n}) - or k in null_default_fields - ) 
# pylint: disable=no-member - - if val is not None and val != UNSET_SENTINEL: - m[k] = val - elif val != UNSET_SENTINEL and ( - not k in optional_fields or (optional_nullable and is_set) - ): - m[k] = val + is_nullable_and_explicitly_set = ( + k in nullable_fields + and (self.__pydantic_fields_set__.intersection({n})) # pylint: disable=no-member + ) + + if val != UNSET_SENTINEL: + if ( + val is not None + or k not in optional_fields + or is_nullable_and_explicitly_set + ): + m[k] = val return m diff --git a/src/mistralai/client/models/classifiertrainingparametersin.py b/src/mistralai/client/models/classifiertrainingparametersin.py deleted file mode 100644 index 85360a7e..00000000 --- a/src/mistralai/client/models/classifiertrainingparametersin.py +++ /dev/null @@ -1,92 +0,0 @@ -"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" -# @generated-id: 4b33d5cf0345 - -from __future__ import annotations -from mistralai.client.types import ( - BaseModel, - Nullable, - OptionalNullable, - UNSET, - UNSET_SENTINEL, -) -from pydantic import model_serializer -from typing import Optional -from typing_extensions import NotRequired, TypedDict - - -class ClassifierTrainingParametersInTypedDict(TypedDict): - r"""The fine-tuning hyperparameter settings used in a classifier fine-tune job.""" - - training_steps: NotRequired[Nullable[int]] - r"""The number of training steps to perform. A training step refers to a single update of the model weights during the fine-tuning process. This update is typically calculated using a batch of samples from the training dataset.""" - learning_rate: NotRequired[float] - r"""A parameter describing how much to adjust the pre-trained model's weights in response to the estimated error each time the weights are updated during the fine-tuning process.""" - weight_decay: NotRequired[Nullable[float]] - r"""(Advanced Usage) Weight decay adds a term to the loss function that is proportional to the sum of the squared weights. 
This term reduces the magnitude of the weights and prevents them from growing too large.""" - warmup_fraction: NotRequired[Nullable[float]] - r"""(Advanced Usage) A parameter that specifies the percentage of the total training steps at which the learning rate warm-up phase ends. During this phase, the learning rate gradually increases from a small value to the initial learning rate, helping to stabilize the training process and improve convergence. Similar to `pct_start` in [mistral-finetune](https://github.com/mistralai/mistral-finetune)""" - epochs: NotRequired[Nullable[float]] - seq_len: NotRequired[Nullable[int]] - - -class ClassifierTrainingParametersIn(BaseModel): - r"""The fine-tuning hyperparameter settings used in a classifier fine-tune job.""" - - training_steps: OptionalNullable[int] = UNSET - r"""The number of training steps to perform. A training step refers to a single update of the model weights during the fine-tuning process. This update is typically calculated using a batch of samples from the training dataset.""" - - learning_rate: Optional[float] = 0.0001 - r"""A parameter describing how much to adjust the pre-trained model's weights in response to the estimated error each time the weights are updated during the fine-tuning process.""" - - weight_decay: OptionalNullable[float] = UNSET - r"""(Advanced Usage) Weight decay adds a term to the loss function that is proportional to the sum of the squared weights. This term reduces the magnitude of the weights and prevents them from growing too large.""" - - warmup_fraction: OptionalNullable[float] = UNSET - r"""(Advanced Usage) A parameter that specifies the percentage of the total training steps at which the learning rate warm-up phase ends. During this phase, the learning rate gradually increases from a small value to the initial learning rate, helping to stabilize the training process and improve convergence. 
Similar to `pct_start` in [mistral-finetune](https://github.com/mistralai/mistral-finetune)""" - - epochs: OptionalNullable[float] = UNSET - - seq_len: OptionalNullable[int] = UNSET - - @model_serializer(mode="wrap") - def serialize_model(self, handler): - optional_fields = [ - "training_steps", - "learning_rate", - "weight_decay", - "warmup_fraction", - "epochs", - "seq_len", - ] - nullable_fields = [ - "training_steps", - "weight_decay", - "warmup_fraction", - "epochs", - "seq_len", - ] - null_default_fields = [] - - serialized = handler(self) - - m = {} - - for n, f in type(self).model_fields.items(): - k = f.alias or n - val = serialized.get(k) - serialized.pop(k, None) - - optional_nullable = k in optional_fields and k in nullable_fields - is_set = ( - self.__pydantic_fields_set__.intersection({n}) - or k in null_default_fields - ) # pylint: disable=no-member - - if val is not None and val != UNSET_SENTINEL: - m[k] = val - elif val != UNSET_SENTINEL and ( - not k in optional_fields or (optional_nullable and is_set) - ): - m[k] = val - - return m diff --git a/src/mistralai/client/models/codeinterpretertool.py b/src/mistralai/client/models/codeinterpretertool.py index f69c7a57..ce14265f 100644 --- a/src/mistralai/client/models/codeinterpretertool.py +++ b/src/mistralai/client/models/codeinterpretertool.py @@ -2,23 +2,65 @@ # @generated-id: 950cd8f4ad49 from __future__ import annotations -from mistralai.client.types import BaseModel +from .toolconfiguration import ToolConfiguration, ToolConfigurationTypedDict +from mistralai.client.types import ( + BaseModel, + Nullable, + OptionalNullable, + UNSET, + UNSET_SENTINEL, +) from mistralai.client.utils import validate_const import pydantic +from pydantic import model_serializer from pydantic.functional_validators import AfterValidator from typing import Literal -from typing_extensions import Annotated, TypedDict +from typing_extensions import Annotated, NotRequired, TypedDict class 
CodeInterpreterToolTypedDict(TypedDict): + tool_configuration: NotRequired[Nullable[ToolConfigurationTypedDict]] type: Literal["code_interpreter"] class CodeInterpreterTool(BaseModel): - TYPE: Annotated[ + tool_configuration: OptionalNullable[ToolConfiguration] = UNSET + + type: Annotated[ Annotated[ Literal["code_interpreter"], AfterValidator(validate_const("code_interpreter")), ], pydantic.Field(alias="type"), ] = "code_interpreter" + + @model_serializer(mode="wrap") + def serialize_model(self, handler): + optional_fields = set(["tool_configuration"]) + nullable_fields = set(["tool_configuration"]) + serialized = handler(self) + m = {} + + for n, f in type(self).model_fields.items(): + k = f.alias or n + val = serialized.get(k) + is_nullable_and_explicitly_set = ( + k in nullable_fields + and (self.__pydantic_fields_set__.intersection({n})) # pylint: disable=no-member + ) + + if val != UNSET_SENTINEL: + if ( + val is not None + or k not in optional_fields + or is_nullable_and_explicitly_set + ): + m[k] = val + + return m + + +try: + CodeInterpreterTool.model_rebuild() +except NameError: + pass diff --git a/src/mistralai/client/models/completionargs.py b/src/mistralai/client/models/completionargs.py index 918832ac..ab5cf5ff 100644 --- a/src/mistralai/client/models/completionargs.py +++ b/src/mistralai/client/models/completionargs.py @@ -58,51 +58,50 @@ class CompletionArgs(BaseModel): @model_serializer(mode="wrap") def serialize_model(self, handler): - optional_fields = [ - "stop", - "presence_penalty", - "frequency_penalty", - "temperature", - "top_p", - "max_tokens", - "random_seed", - "prediction", - "response_format", - "tool_choice", - ] - nullable_fields = [ - "stop", - "presence_penalty", - "frequency_penalty", - "temperature", - "top_p", - "max_tokens", - "random_seed", - "prediction", - "response_format", - ] - null_default_fields = [] - + optional_fields = set( + [ + "stop", + "presence_penalty", + "frequency_penalty", + "temperature", + "top_p", + 
"max_tokens", + "random_seed", + "prediction", + "response_format", + "tool_choice", + ] + ) + nullable_fields = set( + [ + "stop", + "presence_penalty", + "frequency_penalty", + "temperature", + "top_p", + "max_tokens", + "random_seed", + "prediction", + "response_format", + ] + ) serialized = handler(self) - m = {} for n, f in type(self).model_fields.items(): k = f.alias or n val = serialized.get(k) - serialized.pop(k, None) - - optional_nullable = k in optional_fields and k in nullable_fields - is_set = ( - self.__pydantic_fields_set__.intersection({n}) - or k in null_default_fields - ) # pylint: disable=no-member - - if val is not None and val != UNSET_SENTINEL: - m[k] = val - elif val != UNSET_SENTINEL and ( - not k in optional_fields or (optional_nullable and is_set) - ): - m[k] = val + is_nullable_and_explicitly_set = ( + k in nullable_fields + and (self.__pydantic_fields_set__.intersection({n})) # pylint: disable=no-member + ) + + if val != UNSET_SENTINEL: + if ( + val is not None + or k not in optional_fields + or is_nullable_and_explicitly_set + ): + m[k] = val return m diff --git a/src/mistralai/client/models/completionchunk.py b/src/mistralai/client/models/completionchunk.py index 67f447d0..5fd6c173 100644 --- a/src/mistralai/client/models/completionchunk.py +++ b/src/mistralai/client/models/completionchunk.py @@ -7,7 +7,8 @@ CompletionResponseStreamChoiceTypedDict, ) from .usageinfo import UsageInfo, UsageInfoTypedDict -from mistralai.client.types import BaseModel +from mistralai.client.types import BaseModel, UNSET_SENTINEL +from pydantic import model_serializer from typing import List, Optional from typing_extensions import NotRequired, TypedDict @@ -33,3 +34,19 @@ class CompletionChunk(BaseModel): created: Optional[int] = None usage: Optional[UsageInfo] = None + + @model_serializer(mode="wrap") + def serialize_model(self, handler): + optional_fields = set(["object", "created", "usage"]) + serialized = handler(self) + m = {} + + for n, f in 
type(self).model_fields.items(): + k = f.alias or n + val = serialized.get(k) + + if val != UNSET_SENTINEL: + if val is not None or k not in optional_fields: + m[k] = val + + return m diff --git a/src/mistralai/client/models/completiondetailedjobout.py b/src/mistralai/client/models/completiondetailedjobout.py deleted file mode 100644 index cd3a86ee..00000000 --- a/src/mistralai/client/models/completiondetailedjobout.py +++ /dev/null @@ -1,176 +0,0 @@ -"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" -# @generated-id: 9bc38dcfbddf - -from __future__ import annotations -from .checkpointout import CheckpointOut, CheckpointOutTypedDict -from .completiontrainingparameters import ( - CompletionTrainingParameters, - CompletionTrainingParametersTypedDict, -) -from .eventout import EventOut, EventOutTypedDict -from .githubrepositoryout import GithubRepositoryOut, GithubRepositoryOutTypedDict -from .jobmetadataout import JobMetadataOut, JobMetadataOutTypedDict -from .wandbintegrationout import WandbIntegrationOut, WandbIntegrationOutTypedDict -from mistralai.client.types import ( - BaseModel, - Nullable, - OptionalNullable, - UNSET, - UNSET_SENTINEL, - UnrecognizedStr, -) -from mistralai.client.utils import validate_const -import pydantic -from pydantic import model_serializer -from pydantic.functional_validators import AfterValidator -from typing import List, Literal, Optional, Union -from typing_extensions import Annotated, NotRequired, TypedDict - - -CompletionDetailedJobOutStatus = Union[ - Literal[ - "QUEUED", - "STARTED", - "VALIDATING", - "VALIDATED", - "RUNNING", - "FAILED_VALIDATION", - "FAILED", - "SUCCESS", - "CANCELLED", - "CANCELLATION_REQUESTED", - ], - UnrecognizedStr, -] - - -CompletionDetailedJobOutIntegrationTypedDict = WandbIntegrationOutTypedDict - - -CompletionDetailedJobOutIntegration = WandbIntegrationOut - - -CompletionDetailedJobOutRepositoryTypedDict = GithubRepositoryOutTypedDict - - -CompletionDetailedJobOutRepository = 
GithubRepositoryOut - - -class CompletionDetailedJobOutTypedDict(TypedDict): - id: str - auto_start: bool - model: str - status: CompletionDetailedJobOutStatus - created_at: int - modified_at: int - training_files: List[str] - hyperparameters: CompletionTrainingParametersTypedDict - validation_files: NotRequired[Nullable[List[str]]] - object: Literal["job"] - fine_tuned_model: NotRequired[Nullable[str]] - suffix: NotRequired[Nullable[str]] - integrations: NotRequired[ - Nullable[List[CompletionDetailedJobOutIntegrationTypedDict]] - ] - trained_tokens: NotRequired[Nullable[int]] - metadata: NotRequired[Nullable[JobMetadataOutTypedDict]] - job_type: Literal["completion"] - repositories: NotRequired[List[CompletionDetailedJobOutRepositoryTypedDict]] - events: NotRequired[List[EventOutTypedDict]] - r"""Event items are created every time the status of a fine-tuning job changes. The timestamped list of all events is accessible here.""" - checkpoints: NotRequired[List[CheckpointOutTypedDict]] - - -class CompletionDetailedJobOut(BaseModel): - id: str - - auto_start: bool - - model: str - - status: CompletionDetailedJobOutStatus - - created_at: int - - modified_at: int - - training_files: List[str] - - hyperparameters: CompletionTrainingParameters - - validation_files: OptionalNullable[List[str]] = UNSET - - OBJECT: Annotated[ - Annotated[Optional[Literal["job"]], AfterValidator(validate_const("job"))], - pydantic.Field(alias="object"), - ] = "job" - - fine_tuned_model: OptionalNullable[str] = UNSET - - suffix: OptionalNullable[str] = UNSET - - integrations: OptionalNullable[List[CompletionDetailedJobOutIntegration]] = UNSET - - trained_tokens: OptionalNullable[int] = UNSET - - metadata: OptionalNullable[JobMetadataOut] = UNSET - - JOB_TYPE: Annotated[ - Annotated[Literal["completion"], AfterValidator(validate_const("completion"))], - pydantic.Field(alias="job_type"), - ] = "completion" - - repositories: Optional[List[CompletionDetailedJobOutRepository]] = None - - events: 
Optional[List[EventOut]] = None - r"""Event items are created every time the status of a fine-tuning job changes. The timestamped list of all events is accessible here.""" - - checkpoints: Optional[List[CheckpointOut]] = None - - @model_serializer(mode="wrap") - def serialize_model(self, handler): - optional_fields = [ - "validation_files", - "object", - "fine_tuned_model", - "suffix", - "integrations", - "trained_tokens", - "metadata", - "repositories", - "events", - "checkpoints", - ] - nullable_fields = [ - "validation_files", - "fine_tuned_model", - "suffix", - "integrations", - "trained_tokens", - "metadata", - ] - null_default_fields = [] - - serialized = handler(self) - - m = {} - - for n, f in type(self).model_fields.items(): - k = f.alias or n - val = serialized.get(k) - serialized.pop(k, None) - - optional_nullable = k in optional_fields and k in nullable_fields - is_set = ( - self.__pydantic_fields_set__.intersection({n}) - or k in null_default_fields - ) # pylint: disable=no-member - - if val is not None and val != UNSET_SENTINEL: - m[k] = val - elif val != UNSET_SENTINEL and ( - not k in optional_fields or (optional_nullable and is_set) - ): - m[k] = val - - return m diff --git a/src/mistralai/client/models/completionftmodelout.py b/src/mistralai/client/models/completionfinetunedmodel.py similarity index 60% rename from src/mistralai/client/models/completionftmodelout.py rename to src/mistralai/client/models/completionfinetunedmodel.py index 7ecbf54a..54a1c165 100644 --- a/src/mistralai/client/models/completionftmodelout.py +++ b/src/mistralai/client/models/completionfinetunedmodel.py @@ -1,10 +1,10 @@ """Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" -# @generated-id: 0f5277833b3e +# @generated-id: f08c10d149f5 from __future__ import annotations -from .ftmodelcapabilitiesout import ( - FTModelCapabilitiesOut, - FTModelCapabilitiesOutTypedDict, +from .finetunedmodelcapabilities import ( + FineTunedModelCapabilities, + FineTunedModelCapabilitiesTypedDict, ) from mistralai.client.types import ( BaseModel, @@ -21,7 +21,7 @@ from typing_extensions import Annotated, NotRequired, TypedDict -class CompletionFTModelOutTypedDict(TypedDict): +class CompletionFineTunedModelTypedDict(TypedDict): id: str created: int owned_by: str @@ -29,7 +29,7 @@ class CompletionFTModelOutTypedDict(TypedDict): root: str root_version: str archived: bool - capabilities: FTModelCapabilitiesOutTypedDict + capabilities: FineTunedModelCapabilitiesTypedDict job: str object: Literal["model"] name: NotRequired[Nullable[str]] @@ -39,7 +39,7 @@ class CompletionFTModelOutTypedDict(TypedDict): model_type: Literal["completion"] -class CompletionFTModelOut(BaseModel): +class CompletionFineTunedModel(BaseModel): id: str created: int @@ -54,11 +54,11 @@ class CompletionFTModelOut(BaseModel): archived: bool - capabilities: FTModelCapabilitiesOut + capabilities: FineTunedModelCapabilities job: str - OBJECT: Annotated[ + object: Annotated[ Annotated[Optional[Literal["model"]], AfterValidator(validate_const("model"))], pydantic.Field(alias="object"), ] = "model" @@ -71,43 +71,40 @@ class CompletionFTModelOut(BaseModel): aliases: Optional[List[str]] = None - MODEL_TYPE: Annotated[ + model_type: Annotated[ Annotated[Literal["completion"], AfterValidator(validate_const("completion"))], pydantic.Field(alias="model_type"), ] = "completion" @model_serializer(mode="wrap") def serialize_model(self, handler): - optional_fields = [ - "object", - "name", - "description", - "max_context_length", - "aliases", - ] - nullable_fields = ["name", "description"] - null_default_fields = [] - + optional_fields = set( + ["object", "name", "description", 
"max_context_length", "aliases"] + ) + nullable_fields = set(["name", "description"]) serialized = handler(self) - m = {} for n, f in type(self).model_fields.items(): k = f.alias or n val = serialized.get(k) - serialized.pop(k, None) + is_nullable_and_explicitly_set = ( + k in nullable_fields + and (self.__pydantic_fields_set__.intersection({n})) # pylint: disable=no-member + ) + + if val != UNSET_SENTINEL: + if ( + val is not None + or k not in optional_fields + or is_nullable_and_explicitly_set + ): + m[k] = val - optional_nullable = k in optional_fields and k in nullable_fields - is_set = ( - self.__pydantic_fields_set__.intersection({n}) - or k in null_default_fields - ) # pylint: disable=no-member + return m - if val is not None and val != UNSET_SENTINEL: - m[k] = val - elif val != UNSET_SENTINEL and ( - not k in optional_fields or (optional_nullable and is_set) - ): - m[k] = val - return m +try: + CompletionFineTunedModel.model_rebuild() +except NameError: + pass diff --git a/src/mistralai/client/models/completionjobout.py b/src/mistralai/client/models/completionfinetuningjob.py similarity index 56% rename from src/mistralai/client/models/completionjobout.py rename to src/mistralai/client/models/completionfinetuningjob.py index 42e5f6c6..1bf0a730 100644 --- a/src/mistralai/client/models/completionjobout.py +++ b/src/mistralai/client/models/completionfinetuningjob.py @@ -1,14 +1,17 @@ """Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" -# @generated-id: 712e6c524f9a +# @generated-id: c242237efe9b from __future__ import annotations from .completiontrainingparameters import ( CompletionTrainingParameters, CompletionTrainingParametersTypedDict, ) -from .githubrepositoryout import GithubRepositoryOut, GithubRepositoryOutTypedDict -from .jobmetadataout import JobMetadataOut, JobMetadataOutTypedDict -from .wandbintegrationout import WandbIntegrationOut, WandbIntegrationOutTypedDict +from .githubrepository import GithubRepository, GithubRepositoryTypedDict +from .jobmetadata import JobMetadata, JobMetadataTypedDict +from .wandbintegrationresult import ( + WandbIntegrationResult, + WandbIntegrationResultTypedDict, +) from mistralai.client.types import ( BaseModel, Nullable, @@ -19,13 +22,13 @@ ) from mistralai.client.utils import validate_const import pydantic -from pydantic import model_serializer +from pydantic import ConfigDict, model_serializer from pydantic.functional_validators import AfterValidator -from typing import List, Literal, Optional, Union +from typing import Any, List, Literal, Optional, Union from typing_extensions import Annotated, NotRequired, TypedDict -CompletionJobOutStatus = Union[ +CompletionFineTuningJobStatus = Union[ Literal[ "QUEUED", "STARTED", @@ -43,24 +46,54 @@ r"""The current status of the fine-tuning job.""" -CompletionJobOutIntegrationTypedDict = WandbIntegrationOutTypedDict +CompletionFineTuningJobIntegrationTypedDict = WandbIntegrationResultTypedDict + + +class UnknownCompletionFineTuningJobIntegration(BaseModel): + r"""A CompletionFineTuningJobIntegration variant the SDK doesn't recognize. 
Preserves the raw payload.""" + + type: Literal["UNKNOWN"] = "UNKNOWN" + raw: Any + is_unknown: Literal[True] = True + + model_config = ConfigDict(frozen=True) + + +_COMPLETION_FINE_TUNING_JOB_INTEGRATION_VARIANTS: dict[str, Any] = { + "wandb": WandbIntegrationResult, +} + + +CompletionFineTuningJobIntegration = WandbIntegrationResult -CompletionJobOutIntegration = WandbIntegrationOut +CompletionFineTuningJobRepositoryTypedDict = GithubRepositoryTypedDict -CompletionJobOutRepositoryTypedDict = GithubRepositoryOutTypedDict +class UnknownCompletionFineTuningJobRepository(BaseModel): + r"""A CompletionFineTuningJobRepository variant the SDK doesn't recognize. Preserves the raw payload.""" + type: Literal["UNKNOWN"] = "UNKNOWN" + raw: Any + is_unknown: Literal[True] = True -CompletionJobOutRepository = GithubRepositoryOut + model_config = ConfigDict(frozen=True) -class CompletionJobOutTypedDict(TypedDict): +_COMPLETION_FINE_TUNING_JOB_REPOSITORY_VARIANTS: dict[str, Any] = { + "github": GithubRepository, +} + + +CompletionFineTuningJobRepository = GithubRepository + + +class CompletionFineTuningJobTypedDict(TypedDict): id: str r"""The ID of the job.""" auto_start: bool model: str - status: CompletionJobOutStatus + status: CompletionFineTuningJobStatus r"""The current status of the fine-tuning job.""" created_at: int r"""The UNIX timestamp (in seconds) for when the fine-tuning job was created.""" @@ -77,17 +110,19 @@ class CompletionJobOutTypedDict(TypedDict): r"""The name of the fine-tuned model that is being created. The value will be `null` if the fine-tuning job is still running.""" suffix: NotRequired[Nullable[str]] r"""Optional text/code that adds more context for the model. When given a `prompt` and a `suffix` the model will fill what is between them. 
When `suffix` is not provided, the model will simply execute completion starting with `prompt`.""" - integrations: NotRequired[Nullable[List[CompletionJobOutIntegrationTypedDict]]] + integrations: NotRequired[ + Nullable[List[CompletionFineTuningJobIntegrationTypedDict]] + ] r"""A list of integrations enabled for your fine-tuning job.""" trained_tokens: NotRequired[Nullable[int]] r"""Total number of tokens trained.""" - metadata: NotRequired[Nullable[JobMetadataOutTypedDict]] + metadata: NotRequired[Nullable[JobMetadataTypedDict]] job_type: Literal["completion"] r"""The type of job (`FT` for fine-tuning).""" - repositories: NotRequired[List[CompletionJobOutRepositoryTypedDict]] + repositories: NotRequired[List[CompletionFineTuningJobRepositoryTypedDict]] -class CompletionJobOut(BaseModel): +class CompletionFineTuningJob(BaseModel): id: str r"""The ID of the job.""" @@ -95,7 +130,7 @@ class CompletionJobOut(BaseModel): model: str - status: CompletionJobOutStatus + status: CompletionFineTuningJobStatus r"""The current status of the fine-tuning job.""" created_at: int @@ -112,7 +147,7 @@ class CompletionJobOut(BaseModel): validation_files: OptionalNullable[List[str]] = UNSET r"""A list containing the IDs of uploaded files that contain validation data.""" - OBJECT: Annotated[ + object: Annotated[ Annotated[Optional[Literal["job"]], AfterValidator(validate_const("job"))], pydantic.Field(alias="object"), ] = "job" @@ -124,64 +159,69 @@ class CompletionJobOut(BaseModel): suffix: OptionalNullable[str] = UNSET r"""Optional text/code that adds more context for the model. When given a `prompt` and a `suffix` the model will fill what is between them. 
When `suffix` is not provided, the model will simply execute completion starting with `prompt`.""" - integrations: OptionalNullable[List[CompletionJobOutIntegration]] = UNSET + integrations: OptionalNullable[List[CompletionFineTuningJobIntegration]] = UNSET r"""A list of integrations enabled for your fine-tuning job.""" trained_tokens: OptionalNullable[int] = UNSET r"""Total number of tokens trained.""" - metadata: OptionalNullable[JobMetadataOut] = UNSET + metadata: OptionalNullable[JobMetadata] = UNSET - JOB_TYPE: Annotated[ + job_type: Annotated[ Annotated[Literal["completion"], AfterValidator(validate_const("completion"))], pydantic.Field(alias="job_type"), ] = "completion" r"""The type of job (`FT` for fine-tuning).""" - repositories: Optional[List[CompletionJobOutRepository]] = None + repositories: Optional[List[CompletionFineTuningJobRepository]] = None @model_serializer(mode="wrap") def serialize_model(self, handler): - optional_fields = [ - "validation_files", - "object", - "fine_tuned_model", - "suffix", - "integrations", - "trained_tokens", - "metadata", - "repositories", - ] - nullable_fields = [ - "validation_files", - "fine_tuned_model", - "suffix", - "integrations", - "trained_tokens", - "metadata", - ] - null_default_fields = [] - + optional_fields = set( + [ + "validation_files", + "object", + "fine_tuned_model", + "suffix", + "integrations", + "trained_tokens", + "metadata", + "repositories", + ] + ) + nullable_fields = set( + [ + "validation_files", + "fine_tuned_model", + "suffix", + "integrations", + "trained_tokens", + "metadata", + ] + ) serialized = handler(self) - m = {} for n, f in type(self).model_fields.items(): k = f.alias or n val = serialized.get(k) - serialized.pop(k, None) + is_nullable_and_explicitly_set = ( + k in nullable_fields + and (self.__pydantic_fields_set__.intersection({n})) # pylint: disable=no-member + ) + + if val != UNSET_SENTINEL: + if ( + val is not None + or k not in optional_fields + or 
is_nullable_and_explicitly_set + ): + m[k] = val - optional_nullable = k in optional_fields and k in nullable_fields - is_set = ( - self.__pydantic_fields_set__.intersection({n}) - or k in null_default_fields - ) # pylint: disable=no-member + return m - if val is not None and val != UNSET_SENTINEL: - m[k] = val - elif val != UNSET_SENTINEL and ( - not k in optional_fields or (optional_nullable and is_set) - ): - m[k] = val - return m +try: + CompletionFineTuningJob.model_rebuild() +except NameError: + pass diff --git a/src/mistralai/client/models/completionfinetuningjobdetails.py b/src/mistralai/client/models/completionfinetuningjobdetails.py new file mode 100644 index 00000000..cb787021 --- /dev/null +++ b/src/mistralai/client/models/completionfinetuningjobdetails.py @@ -0,0 +1,216 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" +# @generated-id: e8379265af48 + +from __future__ import annotations +from .checkpoint import Checkpoint, CheckpointTypedDict +from .completiontrainingparameters import ( + CompletionTrainingParameters, + CompletionTrainingParametersTypedDict, +) +from .event import Event, EventTypedDict +from .githubrepository import GithubRepository, GithubRepositoryTypedDict +from .jobmetadata import JobMetadata, JobMetadataTypedDict +from .wandbintegrationresult import ( + WandbIntegrationResult, + WandbIntegrationResultTypedDict, +) +from mistralai.client.types import ( + BaseModel, + Nullable, + OptionalNullable, + UNSET, + UNSET_SENTINEL, + UnrecognizedStr, +) +from mistralai.client.utils import validate_const +import pydantic +from pydantic import ConfigDict, model_serializer +from pydantic.functional_validators import AfterValidator +from typing import Any, List, Literal, Optional, Union +from typing_extensions import Annotated, NotRequired, TypedDict + + +CompletionFineTuningJobDetailsStatus = Union[ + Literal[ + "QUEUED", + "STARTED", + "VALIDATING", + "VALIDATED", + "RUNNING", + "FAILED_VALIDATION", + "FAILED", + 
"SUCCESS", + "CANCELLED", + "CANCELLATION_REQUESTED", + ], + UnrecognizedStr, +] + + +CompletionFineTuningJobDetailsIntegrationTypedDict = WandbIntegrationResultTypedDict + + +class UnknownCompletionFineTuningJobDetailsIntegration(BaseModel): + r"""A CompletionFineTuningJobDetailsIntegration variant the SDK doesn't recognize. Preserves the raw payload.""" + + type: Literal["UNKNOWN"] = "UNKNOWN" + raw: Any + is_unknown: Literal[True] = True + + model_config = ConfigDict(frozen=True) + + +_COMPLETION_FINE_TUNING_JOB_DETAILS_INTEGRATION_VARIANTS: dict[str, Any] = { + "wandb": WandbIntegrationResult, +} + + +CompletionFineTuningJobDetailsIntegration = WandbIntegrationResult + + +CompletionFineTuningJobDetailsRepositoryTypedDict = GithubRepositoryTypedDict + + +class UnknownCompletionFineTuningJobDetailsRepository(BaseModel): + r"""A CompletionFineTuningJobDetailsRepository variant the SDK doesn't recognize. Preserves the raw payload.""" + + type: Literal["UNKNOWN"] = "UNKNOWN" + raw: Any + is_unknown: Literal[True] = True + + model_config = ConfigDict(frozen=True) + + +_COMPLETION_FINE_TUNING_JOB_DETAILS_REPOSITORY_VARIANTS: dict[str, Any] = { + "github": GithubRepository, +} + + +CompletionFineTuningJobDetailsRepository = GithubRepository + + +class CompletionFineTuningJobDetailsTypedDict(TypedDict): + id: str + auto_start: bool + model: str + status: CompletionFineTuningJobDetailsStatus + created_at: int + modified_at: int + training_files: List[str] + hyperparameters: CompletionTrainingParametersTypedDict + validation_files: NotRequired[Nullable[List[str]]] + object: Literal["job"] + fine_tuned_model: NotRequired[Nullable[str]] + suffix: NotRequired[Nullable[str]] + integrations: NotRequired[ + Nullable[List[CompletionFineTuningJobDetailsIntegrationTypedDict]] + ] + trained_tokens: NotRequired[Nullable[int]] + metadata: NotRequired[Nullable[JobMetadataTypedDict]] + job_type: Literal["completion"] + repositories: 
NotRequired[List[CompletionFineTuningJobDetailsRepositoryTypedDict]] + events: NotRequired[List[EventTypedDict]] + r"""Event items are created every time the status of a fine-tuning job changes. The timestamped list of all events is accessible here.""" + checkpoints: NotRequired[List[CheckpointTypedDict]] + + +class CompletionFineTuningJobDetails(BaseModel): + id: str + + auto_start: bool + + model: str + + status: CompletionFineTuningJobDetailsStatus + + created_at: int + + modified_at: int + + training_files: List[str] + + hyperparameters: CompletionTrainingParameters + + validation_files: OptionalNullable[List[str]] = UNSET + + object: Annotated[ + Annotated[Optional[Literal["job"]], AfterValidator(validate_const("job"))], + pydantic.Field(alias="object"), + ] = "job" + + fine_tuned_model: OptionalNullable[str] = UNSET + + suffix: OptionalNullable[str] = UNSET + + integrations: OptionalNullable[List[CompletionFineTuningJobDetailsIntegration]] = ( + UNSET + ) + + trained_tokens: OptionalNullable[int] = UNSET + + metadata: OptionalNullable[JobMetadata] = UNSET + + job_type: Annotated[ + Annotated[Literal["completion"], AfterValidator(validate_const("completion"))], + pydantic.Field(alias="job_type"), + ] = "completion" + + repositories: Optional[List[CompletionFineTuningJobDetailsRepository]] = None + + events: Optional[List[Event]] = None + r"""Event items are created every time the status of a fine-tuning job changes. 
The timestamped list of all events is accessible here.""" + + checkpoints: Optional[List[Checkpoint]] = None + + @model_serializer(mode="wrap") + def serialize_model(self, handler): + optional_fields = set( + [ + "validation_files", + "object", + "fine_tuned_model", + "suffix", + "integrations", + "trained_tokens", + "metadata", + "repositories", + "events", + "checkpoints", + ] + ) + nullable_fields = set( + [ + "validation_files", + "fine_tuned_model", + "suffix", + "integrations", + "trained_tokens", + "metadata", + ] + ) + serialized = handler(self) + m = {} + + for n, f in type(self).model_fields.items(): + k = f.alias or n + val = serialized.get(k) + is_nullable_and_explicitly_set = ( + k in nullable_fields + and (self.__pydantic_fields_set__.intersection({n})) # pylint: disable=no-member + ) + + if val != UNSET_SENTINEL: + if ( + val is not None + or k not in optional_fields + or is_nullable_and_explicitly_set + ): + m[k] = val + + return m + + +try: + CompletionFineTuningJobDetails.model_rebuild() +except NameError: + pass diff --git a/src/mistralai/client/models/completionresponsestreamchoice.py b/src/mistralai/client/models/completionresponsestreamchoice.py index 119a9690..a52ae892 100644 --- a/src/mistralai/client/models/completionresponsestreamchoice.py +++ b/src/mistralai/client/models/completionresponsestreamchoice.py @@ -35,30 +35,14 @@ class CompletionResponseStreamChoice(BaseModel): @model_serializer(mode="wrap") def serialize_model(self, handler): - optional_fields = [] - nullable_fields = ["finish_reason"] - null_default_fields = [] - serialized = handler(self) - m = {} for n, f in type(self).model_fields.items(): k = f.alias or n val = serialized.get(k) - serialized.pop(k, None) - optional_nullable = k in optional_fields and k in nullable_fields - is_set = ( - self.__pydantic_fields_set__.intersection({n}) - or k in null_default_fields - ) # pylint: disable=no-member - - if val is not None and val != UNSET_SENTINEL: - m[k] = val - elif val != 
UNSET_SENTINEL and ( - not k in optional_fields or (optional_nullable and is_set) - ): + if val != UNSET_SENTINEL: m[k] = val return m diff --git a/src/mistralai/client/models/completiontrainingparameters.py b/src/mistralai/client/models/completiontrainingparameters.py index 4b846b1b..ca50a7ad 100644 --- a/src/mistralai/client/models/completiontrainingparameters.py +++ b/src/mistralai/client/models/completiontrainingparameters.py @@ -41,45 +41,44 @@ class CompletionTrainingParameters(BaseModel): @model_serializer(mode="wrap") def serialize_model(self, handler): - optional_fields = [ - "training_steps", - "learning_rate", - "weight_decay", - "warmup_fraction", - "epochs", - "seq_len", - "fim_ratio", - ] - nullable_fields = [ - "training_steps", - "weight_decay", - "warmup_fraction", - "epochs", - "seq_len", - "fim_ratio", - ] - null_default_fields = [] - + optional_fields = set( + [ + "training_steps", + "learning_rate", + "weight_decay", + "warmup_fraction", + "epochs", + "seq_len", + "fim_ratio", + ] + ) + nullable_fields = set( + [ + "training_steps", + "weight_decay", + "warmup_fraction", + "epochs", + "seq_len", + "fim_ratio", + ] + ) serialized = handler(self) - m = {} for n, f in type(self).model_fields.items(): k = f.alias or n val = serialized.get(k) - serialized.pop(k, None) - - optional_nullable = k in optional_fields and k in nullable_fields - is_set = ( - self.__pydantic_fields_set__.intersection({n}) - or k in null_default_fields - ) # pylint: disable=no-member - - if val is not None and val != UNSET_SENTINEL: - m[k] = val - elif val != UNSET_SENTINEL and ( - not k in optional_fields or (optional_nullable and is_set) - ): - m[k] = val + is_nullable_and_explicitly_set = ( + k in nullable_fields + and (self.__pydantic_fields_set__.intersection({n})) # pylint: disable=no-member + ) + + if val != UNSET_SENTINEL: + if ( + val is not None + or k not in optional_fields + or is_nullable_and_explicitly_set + ): + m[k] = val return m diff --git 
a/src/mistralai/client/models/completiontrainingparametersin.py b/src/mistralai/client/models/completiontrainingparametersin.py deleted file mode 100644 index 20b74ad9..00000000 --- a/src/mistralai/client/models/completiontrainingparametersin.py +++ /dev/null @@ -1,97 +0,0 @@ -"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" -# @generated-id: 0df22b873b5f - -from __future__ import annotations -from mistralai.client.types import ( - BaseModel, - Nullable, - OptionalNullable, - UNSET, - UNSET_SENTINEL, -) -from pydantic import model_serializer -from typing import Optional -from typing_extensions import NotRequired, TypedDict - - -class CompletionTrainingParametersInTypedDict(TypedDict): - r"""The fine-tuning hyperparameter settings used in a fine-tune job.""" - - training_steps: NotRequired[Nullable[int]] - r"""The number of training steps to perform. A training step refers to a single update of the model weights during the fine-tuning process. This update is typically calculated using a batch of samples from the training dataset.""" - learning_rate: NotRequired[float] - r"""A parameter describing how much to adjust the pre-trained model's weights in response to the estimated error each time the weights are updated during the fine-tuning process.""" - weight_decay: NotRequired[Nullable[float]] - r"""(Advanced Usage) Weight decay adds a term to the loss function that is proportional to the sum of the squared weights. This term reduces the magnitude of the weights and prevents them from growing too large.""" - warmup_fraction: NotRequired[Nullable[float]] - r"""(Advanced Usage) A parameter that specifies the percentage of the total training steps at which the learning rate warm-up phase ends. During this phase, the learning rate gradually increases from a small value to the initial learning rate, helping to stabilize the training process and improve convergence. 
Similar to `pct_start` in [mistral-finetune](https://github.com/mistralai/mistral-finetune)""" - epochs: NotRequired[Nullable[float]] - seq_len: NotRequired[Nullable[int]] - fim_ratio: NotRequired[Nullable[float]] - - -class CompletionTrainingParametersIn(BaseModel): - r"""The fine-tuning hyperparameter settings used in a fine-tune job.""" - - training_steps: OptionalNullable[int] = UNSET - r"""The number of training steps to perform. A training step refers to a single update of the model weights during the fine-tuning process. This update is typically calculated using a batch of samples from the training dataset.""" - - learning_rate: Optional[float] = 0.0001 - r"""A parameter describing how much to adjust the pre-trained model's weights in response to the estimated error each time the weights are updated during the fine-tuning process.""" - - weight_decay: OptionalNullable[float] = UNSET - r"""(Advanced Usage) Weight decay adds a term to the loss function that is proportional to the sum of the squared weights. This term reduces the magnitude of the weights and prevents them from growing too large.""" - - warmup_fraction: OptionalNullable[float] = UNSET - r"""(Advanced Usage) A parameter that specifies the percentage of the total training steps at which the learning rate warm-up phase ends. During this phase, the learning rate gradually increases from a small value to the initial learning rate, helping to stabilize the training process and improve convergence. 
Similar to `pct_start` in [mistral-finetune](https://github.com/mistralai/mistral-finetune)""" - - epochs: OptionalNullable[float] = UNSET - - seq_len: OptionalNullable[int] = UNSET - - fim_ratio: OptionalNullable[float] = UNSET - - @model_serializer(mode="wrap") - def serialize_model(self, handler): - optional_fields = [ - "training_steps", - "learning_rate", - "weight_decay", - "warmup_fraction", - "epochs", - "seq_len", - "fim_ratio", - ] - nullable_fields = [ - "training_steps", - "weight_decay", - "warmup_fraction", - "epochs", - "seq_len", - "fim_ratio", - ] - null_default_fields = [] - - serialized = handler(self) - - m = {} - - for n, f in type(self).model_fields.items(): - k = f.alias or n - val = serialized.get(k) - serialized.pop(k, None) - - optional_nullable = k in optional_fields and k in nullable_fields - is_set = ( - self.__pydantic_fields_set__.intersection({n}) - or k in null_default_fields - ) # pylint: disable=no-member - - if val is not None and val != UNSET_SENTINEL: - m[k] = val - elif val != UNSET_SENTINEL and ( - not k in optional_fields or (optional_nullable and is_set) - ): - m[k] = val - - return m diff --git a/src/mistralai/client/models/contentchunk.py b/src/mistralai/client/models/contentchunk.py index eff4b8c6..e3de7591 100644 --- a/src/mistralai/client/models/contentchunk.py +++ b/src/mistralai/client/models/contentchunk.py @@ -9,9 +9,12 @@ from .referencechunk import ReferenceChunk, ReferenceChunkTypedDict from .textchunk import TextChunk, TextChunkTypedDict from .thinkchunk import ThinkChunk, ThinkChunkTypedDict -from mistralai.client.utils import get_discriminator -from pydantic import Discriminator, Tag -from typing import Union +from functools import partial +from mistralai.client.types import BaseModel +from mistralai.client.utils.unions import parse_open_union +from pydantic import ConfigDict +from pydantic.functional_validators import BeforeValidator +from typing import Any, Literal, Union from typing_extensions import 
Annotated, TypeAliasType @@ -29,15 +32,45 @@ ) +class UnknownContentChunk(BaseModel): + r"""A ContentChunk variant the SDK doesn't recognize. Preserves the raw payload.""" + + type: Literal["UNKNOWN"] = "UNKNOWN" + raw: Any + is_unknown: Literal[True] = True + + model_config = ConfigDict(frozen=True) + + +_CONTENT_CHUNK_VARIANTS: dict[str, Any] = { + "image_url": ImageURLChunk, + "document_url": DocumentURLChunk, + "text": TextChunk, + "reference": ReferenceChunk, + "file": FileChunk, + "thinking": ThinkChunk, + "input_audio": AudioChunk, +} + + ContentChunk = Annotated[ Union[ - Annotated[ImageURLChunk, Tag("image_url")], - Annotated[DocumentURLChunk, Tag("document_url")], - Annotated[TextChunk, Tag("text")], - Annotated[ReferenceChunk, Tag("reference")], - Annotated[FileChunk, Tag("file")], - Annotated[ThinkChunk, Tag("thinking")], - Annotated[AudioChunk, Tag("input_audio")], + ImageURLChunk, + DocumentURLChunk, + TextChunk, + ReferenceChunk, + FileChunk, + ThinkChunk, + AudioChunk, + UnknownContentChunk, ], - Discriminator(lambda m: get_discriminator(m, "type", "type")), + BeforeValidator( + partial( + parse_open_union, + disc_key="type", + variants=_CONTENT_CHUNK_VARIANTS, + unknown_cls=UnknownContentChunk, + union_name="ContentChunk", + ) + ), ] diff --git a/src/mistralai/client/models/conversationappendrequest.py b/src/mistralai/client/models/conversationappendrequest.py index 0f07475e..386714fd 100644 --- a/src/mistralai/client/models/conversationappendrequest.py +++ b/src/mistralai/client/models/conversationappendrequest.py @@ -4,8 +4,16 @@ from __future__ import annotations from .completionargs import CompletionArgs, CompletionArgsTypedDict from .conversationinputs import ConversationInputs, ConversationInputsTypedDict -from mistralai.client.types import BaseModel -from typing import Literal, Optional +from .toolcallconfirmation import ToolCallConfirmation, ToolCallConfirmationTypedDict +from mistralai.client.types import ( + BaseModel, + Nullable, + 
OptionalNullable, + UNSET, + UNSET_SENTINEL, +) +from pydantic import model_serializer +from typing import List, Literal, Optional from typing_extensions import NotRequired, TypedDict @@ -16,17 +24,18 @@ class ConversationAppendRequestTypedDict(TypedDict): - inputs: ConversationInputsTypedDict + inputs: NotRequired[ConversationInputsTypedDict] stream: NotRequired[bool] store: NotRequired[bool] r"""Whether to store the results into our servers or not.""" handoff_execution: NotRequired[ConversationAppendRequestHandoffExecution] completion_args: NotRequired[CompletionArgsTypedDict] r"""White-listed arguments from the completion API""" + tool_confirmations: NotRequired[Nullable[List[ToolCallConfirmationTypedDict]]] class ConversationAppendRequest(BaseModel): - inputs: ConversationInputs + inputs: Optional[ConversationInputs] = None stream: Optional[bool] = False @@ -37,3 +46,39 @@ class ConversationAppendRequest(BaseModel): completion_args: Optional[CompletionArgs] = None r"""White-listed arguments from the completion API""" + + tool_confirmations: OptionalNullable[List[ToolCallConfirmation]] = UNSET + + @model_serializer(mode="wrap") + def serialize_model(self, handler): + optional_fields = set( + [ + "inputs", + "stream", + "store", + "handoff_execution", + "completion_args", + "tool_confirmations", + ] + ) + nullable_fields = set(["tool_confirmations"]) + serialized = handler(self) + m = {} + + for n, f in type(self).model_fields.items(): + k = f.alias or n + val = serialized.get(k) + is_nullable_and_explicitly_set = ( + k in nullable_fields + and (self.__pydantic_fields_set__.intersection({n})) # pylint: disable=no-member + ) + + if val != UNSET_SENTINEL: + if ( + val is not None + or k not in optional_fields + or is_nullable_and_explicitly_set + ): + m[k] = val + + return m diff --git a/src/mistralai/client/models/conversationappendstreamrequest.py b/src/mistralai/client/models/conversationappendstreamrequest.py index a0d46f72..32f6b148 100644 --- 
a/src/mistralai/client/models/conversationappendstreamrequest.py +++ b/src/mistralai/client/models/conversationappendstreamrequest.py @@ -4,8 +4,16 @@ from __future__ import annotations from .completionargs import CompletionArgs, CompletionArgsTypedDict from .conversationinputs import ConversationInputs, ConversationInputsTypedDict -from mistralai.client.types import BaseModel -from typing import Literal, Optional +from .toolcallconfirmation import ToolCallConfirmation, ToolCallConfirmationTypedDict +from mistralai.client.types import ( + BaseModel, + Nullable, + OptionalNullable, + UNSET, + UNSET_SENTINEL, +) +from pydantic import model_serializer +from typing import List, Literal, Optional from typing_extensions import NotRequired, TypedDict @@ -16,17 +24,18 @@ class ConversationAppendStreamRequestTypedDict(TypedDict): - inputs: ConversationInputsTypedDict + inputs: NotRequired[ConversationInputsTypedDict] stream: NotRequired[bool] store: NotRequired[bool] r"""Whether to store the results into our servers or not.""" handoff_execution: NotRequired[ConversationAppendStreamRequestHandoffExecution] completion_args: NotRequired[CompletionArgsTypedDict] r"""White-listed arguments from the completion API""" + tool_confirmations: NotRequired[Nullable[List[ToolCallConfirmationTypedDict]]] class ConversationAppendStreamRequest(BaseModel): - inputs: ConversationInputs + inputs: Optional[ConversationInputs] = None stream: Optional[bool] = True @@ -39,3 +48,39 @@ class ConversationAppendStreamRequest(BaseModel): completion_args: Optional[CompletionArgs] = None r"""White-listed arguments from the completion API""" + + tool_confirmations: OptionalNullable[List[ToolCallConfirmation]] = UNSET + + @model_serializer(mode="wrap") + def serialize_model(self, handler): + optional_fields = set( + [ + "inputs", + "stream", + "store", + "handoff_execution", + "completion_args", + "tool_confirmations", + ] + ) + nullable_fields = set(["tool_confirmations"]) + serialized = handler(self) + 
m = {} + + for n, f in type(self).model_fields.items(): + k = f.alias or n + val = serialized.get(k) + is_nullable_and_explicitly_set = ( + k in nullable_fields + and (self.__pydantic_fields_set__.intersection({n})) # pylint: disable=no-member + ) + + if val != UNSET_SENTINEL: + if ( + val is not None + or k not in optional_fields + or is_nullable_and_explicitly_set + ): + m[k] = val + + return m diff --git a/src/mistralai/client/models/conversationevents.py b/src/mistralai/client/models/conversationevents.py index f2476038..17812983 100644 --- a/src/mistralai/client/models/conversationevents.py +++ b/src/mistralai/client/models/conversationevents.py @@ -25,9 +25,12 @@ ToolExecutionStartedEvent, ToolExecutionStartedEventTypedDict, ) +from functools import partial from mistralai.client.types import BaseModel -from pydantic import Field -from typing import Union +from mistralai.client.utils.unions import parse_open_union +from pydantic import ConfigDict +from pydantic.functional_validators import BeforeValidator +from typing import Any, Literal, Union from typing_extensions import Annotated, TypeAliasType, TypedDict @@ -37,17 +40,41 @@ ResponseStartedEventTypedDict, ResponseDoneEventTypedDict, ResponseErrorEventTypedDict, - ToolExecutionStartedEventTypedDict, ToolExecutionDeltaEventTypedDict, ToolExecutionDoneEventTypedDict, AgentHandoffStartedEventTypedDict, AgentHandoffDoneEventTypedDict, - FunctionCallEventTypedDict, + ToolExecutionStartedEventTypedDict, MessageOutputEventTypedDict, + FunctionCallEventTypedDict, ], ) +class UnknownConversationEventsData(BaseModel): + r"""A ConversationEventsData variant the SDK doesn't recognize. 
Preserves the raw payload.""" + + type: Literal["UNKNOWN"] = "UNKNOWN" + raw: Any + is_unknown: Literal[True] = True + + model_config = ConfigDict(frozen=True) + + +_CONVERSATION_EVENTS_DATA_VARIANTS: dict[str, Any] = { + "agent.handoff.done": AgentHandoffDoneEvent, + "agent.handoff.started": AgentHandoffStartedEvent, + "conversation.response.done": ResponseDoneEvent, + "conversation.response.error": ResponseErrorEvent, + "conversation.response.started": ResponseStartedEvent, + "function.call.delta": FunctionCallEvent, + "message.output.delta": MessageOutputEvent, + "tool.execution.delta": ToolExecutionDeltaEvent, + "tool.execution.done": ToolExecutionDoneEvent, + "tool.execution.started": ToolExecutionStartedEvent, +} + + ConversationEventsData = Annotated[ Union[ AgentHandoffDoneEvent, @@ -60,8 +87,17 @@ ToolExecutionDeltaEvent, ToolExecutionDoneEvent, ToolExecutionStartedEvent, + UnknownConversationEventsData, ], - Field(discriminator="TYPE"), + BeforeValidator( + partial( + parse_open_union, + disc_key="type", + variants=_CONVERSATION_EVENTS_DATA_VARIANTS, + unknown_cls=UnknownConversationEventsData, + union_name="ConversationEventsData", + ) + ), ] diff --git a/src/mistralai/client/models/conversationhistory.py b/src/mistralai/client/models/conversationhistory.py index 92d6cbf9..ceef115b 100644 --- a/src/mistralai/client/models/conversationhistory.py +++ b/src/mistralai/client/models/conversationhistory.py @@ -8,12 +8,13 @@ from .messageinputentry import MessageInputEntry, MessageInputEntryTypedDict from .messageoutputentry import MessageOutputEntry, MessageOutputEntryTypedDict from .toolexecutionentry import ToolExecutionEntry, ToolExecutionEntryTypedDict -from mistralai.client.types import BaseModel +from mistralai.client.types import BaseModel, UNSET_SENTINEL +from mistralai.client.utils import validate_const +import pydantic +from pydantic import model_serializer +from pydantic.functional_validators import AfterValidator from typing import List, Literal, 
Optional, Union -from typing_extensions import NotRequired, TypeAliasType, TypedDict - - -ConversationHistoryObject = Literal["conversation.history",] +from typing_extensions import Annotated, TypeAliasType, TypedDict EntryTypedDict = TypeAliasType( @@ -21,10 +22,10 @@ Union[ FunctionResultEntryTypedDict, MessageInputEntryTypedDict, - FunctionCallEntryTypedDict, - ToolExecutionEntryTypedDict, MessageOutputEntryTypedDict, AgentHandoffEntryTypedDict, + ToolExecutionEntryTypedDict, + FunctionCallEntryTypedDict, ], ) @@ -34,10 +35,10 @@ Union[ FunctionResultEntry, MessageInputEntry, - FunctionCallEntry, - ToolExecutionEntry, MessageOutputEntry, AgentHandoffEntry, + ToolExecutionEntry, + FunctionCallEntry, ], ) @@ -47,7 +48,7 @@ class ConversationHistoryTypedDict(TypedDict): conversation_id: str entries: List[EntryTypedDict] - object: NotRequired[ConversationHistoryObject] + object: Literal["conversation.history"] class ConversationHistory(BaseModel): @@ -57,4 +58,32 @@ class ConversationHistory(BaseModel): entries: List[Entry] - object: Optional[ConversationHistoryObject] = "conversation.history" + object: Annotated[ + Annotated[ + Optional[Literal["conversation.history"]], + AfterValidator(validate_const("conversation.history")), + ], + pydantic.Field(alias="object"), + ] = "conversation.history" + + @model_serializer(mode="wrap") + def serialize_model(self, handler): + optional_fields = set(["object"]) + serialized = handler(self) + m = {} + + for n, f in type(self).model_fields.items(): + k = f.alias or n + val = serialized.get(k) + + if val != UNSET_SENTINEL: + if val is not None or k not in optional_fields: + m[k] = val + + return m + + +try: + ConversationHistory.model_rebuild() +except NameError: + pass diff --git a/src/mistralai/client/models/conversationmessages.py b/src/mistralai/client/models/conversationmessages.py index 1aa294a4..84664b62 100644 --- a/src/mistralai/client/models/conversationmessages.py +++ 
b/src/mistralai/client/models/conversationmessages.py @@ -3,12 +3,13 @@ from __future__ import annotations from .messageentries import MessageEntries, MessageEntriesTypedDict -from mistralai.client.types import BaseModel +from mistralai.client.types import BaseModel, UNSET_SENTINEL +from mistralai.client.utils import validate_const +import pydantic +from pydantic import model_serializer +from pydantic.functional_validators import AfterValidator from typing import List, Literal, Optional -from typing_extensions import NotRequired, TypedDict - - -ConversationMessagesObject = Literal["conversation.messages",] +from typing_extensions import Annotated, TypedDict class ConversationMessagesTypedDict(TypedDict): @@ -16,7 +17,7 @@ class ConversationMessagesTypedDict(TypedDict): conversation_id: str messages: List[MessageEntriesTypedDict] - object: NotRequired[ConversationMessagesObject] + object: Literal["conversation.messages"] class ConversationMessages(BaseModel): @@ -26,4 +27,32 @@ class ConversationMessages(BaseModel): messages: List[MessageEntries] - object: Optional[ConversationMessagesObject] = "conversation.messages" + object: Annotated[ + Annotated[ + Optional[Literal["conversation.messages"]], + AfterValidator(validate_const("conversation.messages")), + ], + pydantic.Field(alias="object"), + ] = "conversation.messages" + + @model_serializer(mode="wrap") + def serialize_model(self, handler): + optional_fields = set(["object"]) + serialized = handler(self) + m = {} + + for n, f in type(self).model_fields.items(): + k = f.alias or n + val = serialized.get(k) + + if val != UNSET_SENTINEL: + if val is not None or k not in optional_fields: + m[k] = val + + return m + + +try: + ConversationMessages.model_rebuild() +except NameError: + pass diff --git a/src/mistralai/client/models/conversationrequest.py b/src/mistralai/client/models/conversationrequest.py index 2005be82..83d599eb 100644 --- a/src/mistralai/client/models/conversationrequest.py +++ 
b/src/mistralai/client/models/conversationrequest.py @@ -31,11 +31,11 @@ ConversationRequestToolTypedDict = TypeAliasType( "ConversationRequestToolTypedDict", Union[ + FunctionToolTypedDict, WebSearchToolTypedDict, WebSearchPremiumToolTypedDict, CodeInterpreterToolTypedDict, ImageGenerationToolTypedDict, - FunctionToolTypedDict, DocumentLibraryToolTypedDict, ], ) @@ -50,7 +50,7 @@ WebSearchTool, WebSearchPremiumTool, ], - Field(discriminator="TYPE"), + Field(discriminator="type"), ] @@ -111,54 +111,53 @@ class ConversationRequest(BaseModel): @model_serializer(mode="wrap") def serialize_model(self, handler): - optional_fields = [ - "stream", - "store", - "handoff_execution", - "instructions", - "tools", - "completion_args", - "name", - "description", - "metadata", - "agent_id", - "agent_version", - "model", - ] - nullable_fields = [ - "store", - "handoff_execution", - "instructions", - "completion_args", - "name", - "description", - "metadata", - "agent_id", - "agent_version", - "model", - ] - null_default_fields = [] - + optional_fields = set( + [ + "stream", + "store", + "handoff_execution", + "instructions", + "tools", + "completion_args", + "name", + "description", + "metadata", + "agent_id", + "agent_version", + "model", + ] + ) + nullable_fields = set( + [ + "store", + "handoff_execution", + "instructions", + "completion_args", + "name", + "description", + "metadata", + "agent_id", + "agent_version", + "model", + ] + ) serialized = handler(self) - m = {} for n, f in type(self).model_fields.items(): k = f.alias or n val = serialized.get(k) - serialized.pop(k, None) - - optional_nullable = k in optional_fields and k in nullable_fields - is_set = ( - self.__pydantic_fields_set__.intersection({n}) - or k in null_default_fields - ) # pylint: disable=no-member - - if val is not None and val != UNSET_SENTINEL: - m[k] = val - elif val != UNSET_SENTINEL and ( - not k in optional_fields or (optional_nullable and is_set) - ): - m[k] = val + is_nullable_and_explicitly_set 
= ( + k in nullable_fields + and (self.__pydantic_fields_set__.intersection({n})) # pylint: disable=no-member + ) + + if val != UNSET_SENTINEL: + if ( + val is not None + or k not in optional_fields + or is_nullable_and_explicitly_set + ): + m[k] = val return m diff --git a/src/mistralai/client/models/conversationresponse.py b/src/mistralai/client/models/conversationresponse.py index 24598ef3..f6c10969 100644 --- a/src/mistralai/client/models/conversationresponse.py +++ b/src/mistralai/client/models/conversationresponse.py @@ -7,28 +7,29 @@ from .functioncallentry import FunctionCallEntry, FunctionCallEntryTypedDict from .messageoutputentry import MessageOutputEntry, MessageOutputEntryTypedDict from .toolexecutionentry import ToolExecutionEntry, ToolExecutionEntryTypedDict -from mistralai.client.types import BaseModel +from mistralai.client.types import BaseModel, UNSET_SENTINEL +from mistralai.client.utils import validate_const +import pydantic +from pydantic import model_serializer +from pydantic.functional_validators import AfterValidator from typing import List, Literal, Optional, Union -from typing_extensions import NotRequired, TypeAliasType, TypedDict - - -ConversationResponseObject = Literal["conversation.response",] +from typing_extensions import Annotated, TypeAliasType, TypedDict OutputTypedDict = TypeAliasType( "OutputTypedDict", Union[ - ToolExecutionEntryTypedDict, - FunctionCallEntryTypedDict, MessageOutputEntryTypedDict, AgentHandoffEntryTypedDict, + ToolExecutionEntryTypedDict, + FunctionCallEntryTypedDict, ], ) Output = TypeAliasType( "Output", - Union[ToolExecutionEntry, FunctionCallEntry, MessageOutputEntry, AgentHandoffEntry], + Union[MessageOutputEntry, AgentHandoffEntry, ToolExecutionEntry, FunctionCallEntry], ) @@ -38,7 +39,7 @@ class ConversationResponseTypedDict(TypedDict): conversation_id: str outputs: List[OutputTypedDict] usage: ConversationUsageInfoTypedDict - object: NotRequired[ConversationResponseObject] + object: 
Literal["conversation.response"] class ConversationResponse(BaseModel): @@ -50,4 +51,32 @@ class ConversationResponse(BaseModel): usage: ConversationUsageInfo - object: Optional[ConversationResponseObject] = "conversation.response" + object: Annotated[ + Annotated[ + Optional[Literal["conversation.response"]], + AfterValidator(validate_const("conversation.response")), + ], + pydantic.Field(alias="object"), + ] = "conversation.response" + + @model_serializer(mode="wrap") + def serialize_model(self, handler): + optional_fields = set(["object"]) + serialized = handler(self) + m = {} + + for n, f in type(self).model_fields.items(): + k = f.alias or n + val = serialized.get(k) + + if val != UNSET_SENTINEL: + if val is not None or k not in optional_fields: + m[k] = val + + return m + + +try: + ConversationResponse.model_rebuild() +except NameError: + pass diff --git a/src/mistralai/client/models/conversationrestartrequest.py b/src/mistralai/client/models/conversationrestartrequest.py index 35d30993..7ae16aff 100644 --- a/src/mistralai/client/models/conversationrestartrequest.py +++ b/src/mistralai/client/models/conversationrestartrequest.py @@ -37,8 +37,8 @@ class ConversationRestartRequestTypedDict(TypedDict): r"""Request to restart a new conversation from a given entry in the conversation.""" - inputs: ConversationInputsTypedDict from_entry_id: str + inputs: NotRequired[ConversationInputsTypedDict] stream: NotRequired[bool] store: NotRequired[bool] r"""Whether to store the results into our servers or not.""" @@ -56,10 +56,10 @@ class ConversationRestartRequestTypedDict(TypedDict): class ConversationRestartRequest(BaseModel): r"""Request to restart a new conversation from a given entry in the conversation.""" - inputs: ConversationInputs - from_entry_id: str + inputs: Optional[ConversationInputs] = None + stream: Optional[bool] = False store: Optional[bool] = True @@ -78,37 +78,35 @@ class ConversationRestartRequest(BaseModel): @model_serializer(mode="wrap") def 
serialize_model(self, handler): - optional_fields = [ - "stream", - "store", - "handoff_execution", - "completion_args", - "metadata", - "agent_version", - ] - nullable_fields = ["metadata", "agent_version"] - null_default_fields = [] - + optional_fields = set( + [ + "inputs", + "stream", + "store", + "handoff_execution", + "completion_args", + "metadata", + "agent_version", + ] + ) + nullable_fields = set(["metadata", "agent_version"]) serialized = handler(self) - m = {} for n, f in type(self).model_fields.items(): k = f.alias or n val = serialized.get(k) - serialized.pop(k, None) - - optional_nullable = k in optional_fields and k in nullable_fields - is_set = ( - self.__pydantic_fields_set__.intersection({n}) - or k in null_default_fields - ) # pylint: disable=no-member - - if val is not None and val != UNSET_SENTINEL: - m[k] = val - elif val != UNSET_SENTINEL and ( - not k in optional_fields or (optional_nullable and is_set) - ): - m[k] = val + is_nullable_and_explicitly_set = ( + k in nullable_fields + and (self.__pydantic_fields_set__.intersection({n})) # pylint: disable=no-member + ) + + if val != UNSET_SENTINEL: + if ( + val is not None + or k not in optional_fields + or is_nullable_and_explicitly_set + ): + m[k] = val return m diff --git a/src/mistralai/client/models/conversationrestartstreamrequest.py b/src/mistralai/client/models/conversationrestartstreamrequest.py index 0ddfb130..0e247261 100644 --- a/src/mistralai/client/models/conversationrestartstreamrequest.py +++ b/src/mistralai/client/models/conversationrestartstreamrequest.py @@ -37,8 +37,8 @@ class ConversationRestartStreamRequestTypedDict(TypedDict): r"""Request to restart a new conversation from a given entry in the conversation.""" - inputs: ConversationInputsTypedDict from_entry_id: str + inputs: NotRequired[ConversationInputsTypedDict] stream: NotRequired[bool] store: NotRequired[bool] r"""Whether to store the results into our servers or not.""" @@ -56,10 +56,10 @@ class 
ConversationRestartStreamRequestTypedDict(TypedDict): class ConversationRestartStreamRequest(BaseModel): r"""Request to restart a new conversation from a given entry in the conversation.""" - inputs: ConversationInputs - from_entry_id: str + inputs: Optional[ConversationInputs] = None + stream: Optional[bool] = True store: Optional[bool] = True @@ -82,37 +82,35 @@ class ConversationRestartStreamRequest(BaseModel): @model_serializer(mode="wrap") def serialize_model(self, handler): - optional_fields = [ - "stream", - "store", - "handoff_execution", - "completion_args", - "metadata", - "agent_version", - ] - nullable_fields = ["metadata", "agent_version"] - null_default_fields = [] - + optional_fields = set( + [ + "inputs", + "stream", + "store", + "handoff_execution", + "completion_args", + "metadata", + "agent_version", + ] + ) + nullable_fields = set(["metadata", "agent_version"]) serialized = handler(self) - m = {} for n, f in type(self).model_fields.items(): k = f.alias or n val = serialized.get(k) - serialized.pop(k, None) - - optional_nullable = k in optional_fields and k in nullable_fields - is_set = ( - self.__pydantic_fields_set__.intersection({n}) - or k in null_default_fields - ) # pylint: disable=no-member - - if val is not None and val != UNSET_SENTINEL: - m[k] = val - elif val != UNSET_SENTINEL and ( - not k in optional_fields or (optional_nullable and is_set) - ): - m[k] = val + is_nullable_and_explicitly_set = ( + k in nullable_fields + and (self.__pydantic_fields_set__.intersection({n})) # pylint: disable=no-member + ) + + if val != UNSET_SENTINEL: + if ( + val is not None + or k not in optional_fields + or is_nullable_and_explicitly_set + ): + m[k] = val return m diff --git a/src/mistralai/client/models/conversationstreamrequest.py b/src/mistralai/client/models/conversationstreamrequest.py index 379a8f28..a20dccae 100644 --- a/src/mistralai/client/models/conversationstreamrequest.py +++ b/src/mistralai/client/models/conversationstreamrequest.py @@ 
-31,11 +31,11 @@ ConversationStreamRequestToolTypedDict = TypeAliasType( "ConversationStreamRequestToolTypedDict", Union[ + FunctionToolTypedDict, WebSearchToolTypedDict, WebSearchPremiumToolTypedDict, CodeInterpreterToolTypedDict, ImageGenerationToolTypedDict, - FunctionToolTypedDict, DocumentLibraryToolTypedDict, ], ) @@ -50,7 +50,7 @@ WebSearchTool, WebSearchPremiumTool, ], - Field(discriminator="TYPE"), + Field(discriminator="type"), ] @@ -113,54 +113,53 @@ class ConversationStreamRequest(BaseModel): @model_serializer(mode="wrap") def serialize_model(self, handler): - optional_fields = [ - "stream", - "store", - "handoff_execution", - "instructions", - "tools", - "completion_args", - "name", - "description", - "metadata", - "agent_id", - "agent_version", - "model", - ] - nullable_fields = [ - "store", - "handoff_execution", - "instructions", - "completion_args", - "name", - "description", - "metadata", - "agent_id", - "agent_version", - "model", - ] - null_default_fields = [] - + optional_fields = set( + [ + "stream", + "store", + "handoff_execution", + "instructions", + "tools", + "completion_args", + "name", + "description", + "metadata", + "agent_id", + "agent_version", + "model", + ] + ) + nullable_fields = set( + [ + "store", + "handoff_execution", + "instructions", + "completion_args", + "name", + "description", + "metadata", + "agent_id", + "agent_version", + "model", + ] + ) serialized = handler(self) - m = {} for n, f in type(self).model_fields.items(): k = f.alias or n val = serialized.get(k) - serialized.pop(k, None) - - optional_nullable = k in optional_fields and k in nullable_fields - is_set = ( - self.__pydantic_fields_set__.intersection({n}) - or k in null_default_fields - ) # pylint: disable=no-member - - if val is not None and val != UNSET_SENTINEL: - m[k] = val - elif val != UNSET_SENTINEL and ( - not k in optional_fields or (optional_nullable and is_set) - ): - m[k] = val + is_nullable_and_explicitly_set = ( + k in nullable_fields + and 
(self.__pydantic_fields_set__.intersection({n})) # pylint: disable=no-member + ) + + if val != UNSET_SENTINEL: + if ( + val is not None + or k not in optional_fields + or is_nullable_and_explicitly_set + ): + m[k] = val return m diff --git a/src/mistralai/client/models/conversationthinkchunk.py b/src/mistralai/client/models/conversationthinkchunk.py new file mode 100644 index 00000000..e0e172e3 --- /dev/null +++ b/src/mistralai/client/models/conversationthinkchunk.py @@ -0,0 +1,65 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" +# @generated-id: 77e59cde5c0f + +from __future__ import annotations +from .textchunk import TextChunk, TextChunkTypedDict +from .toolreferencechunk import ToolReferenceChunk, ToolReferenceChunkTypedDict +from mistralai.client.types import BaseModel, UNSET_SENTINEL +from mistralai.client.utils import validate_const +import pydantic +from pydantic import model_serializer +from pydantic.functional_validators import AfterValidator +from typing import List, Literal, Optional, Union +from typing_extensions import Annotated, NotRequired, TypeAliasType, TypedDict + + +ConversationThinkChunkThinkingTypedDict = TypeAliasType( + "ConversationThinkChunkThinkingTypedDict", + Union[TextChunkTypedDict, ToolReferenceChunkTypedDict], +) + + +ConversationThinkChunkThinking = TypeAliasType( + "ConversationThinkChunkThinking", Union[TextChunk, ToolReferenceChunk] +) + + +class ConversationThinkChunkTypedDict(TypedDict): + thinking: List[ConversationThinkChunkThinkingTypedDict] + type: Literal["thinking"] + closed: NotRequired[bool] + + +class ConversationThinkChunk(BaseModel): + thinking: List[ConversationThinkChunkThinking] + + type: Annotated[ + Annotated[ + Optional[Literal["thinking"]], AfterValidator(validate_const("thinking")) + ], + pydantic.Field(alias="type"), + ] = "thinking" + + closed: Optional[bool] = True + + @model_serializer(mode="wrap") + def serialize_model(self, handler): + optional_fields = set(["type", 
"closed"]) + serialized = handler(self) + m = {} + + for n, f in type(self).model_fields.items(): + k = f.alias or n + val = serialized.get(k) + + if val != UNSET_SENTINEL: + if val is not None or k not in optional_fields: + m[k] = val + + return m + + +try: + ConversationThinkChunk.model_rebuild() +except NameError: + pass diff --git a/src/mistralai/client/models/conversationusageinfo.py b/src/mistralai/client/models/conversationusageinfo.py index 98db0f16..1e80f89e 100644 --- a/src/mistralai/client/models/conversationusageinfo.py +++ b/src/mistralai/client/models/conversationusageinfo.py @@ -35,36 +35,33 @@ class ConversationUsageInfo(BaseModel): @model_serializer(mode="wrap") def serialize_model(self, handler): - optional_fields = [ - "prompt_tokens", - "completion_tokens", - "total_tokens", - "connector_tokens", - "connectors", - ] - nullable_fields = ["connector_tokens", "connectors"] - null_default_fields = [] - + optional_fields = set( + [ + "prompt_tokens", + "completion_tokens", + "total_tokens", + "connector_tokens", + "connectors", + ] + ) + nullable_fields = set(["connector_tokens", "connectors"]) serialized = handler(self) - m = {} for n, f in type(self).model_fields.items(): k = f.alias or n val = serialized.get(k) - serialized.pop(k, None) - - optional_nullable = k in optional_fields and k in nullable_fields - is_set = ( - self.__pydantic_fields_set__.intersection({n}) - or k in null_default_fields - ) # pylint: disable=no-member - - if val is not None and val != UNSET_SENTINEL: - m[k] = val - elif val != UNSET_SENTINEL and ( - not k in optional_fields or (optional_nullable and is_set) - ): - m[k] = val + is_nullable_and_explicitly_set = ( + k in nullable_fields + and (self.__pydantic_fields_set__.intersection({n})) # pylint: disable=no-member + ) + + if val != UNSET_SENTINEL: + if ( + val is not None + or k not in optional_fields + or is_nullable_and_explicitly_set + ): + m[k] = val return m diff --git 
a/src/mistralai/client/models/agentcreationrequest.py b/src/mistralai/client/models/createagentrequest.py similarity index 66% rename from src/mistralai/client/models/agentcreationrequest.py rename to src/mistralai/client/models/createagentrequest.py index 898d42a9..54b09880 100644 --- a/src/mistralai/client/models/agentcreationrequest.py +++ b/src/mistralai/client/models/createagentrequest.py @@ -1,5 +1,5 @@ """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" -# @generated-id: 35b7f4933b3e +# @generated-id: 442629bd914b from __future__ import annotations from .codeinterpretertool import CodeInterpreterTool, CodeInterpreterToolTypedDict @@ -21,20 +21,20 @@ from typing_extensions import Annotated, NotRequired, TypeAliasType, TypedDict -AgentCreationRequestToolTypedDict = TypeAliasType( - "AgentCreationRequestToolTypedDict", +CreateAgentRequestToolTypedDict = TypeAliasType( + "CreateAgentRequestToolTypedDict", Union[ + FunctionToolTypedDict, WebSearchToolTypedDict, WebSearchPremiumToolTypedDict, CodeInterpreterToolTypedDict, ImageGenerationToolTypedDict, - FunctionToolTypedDict, DocumentLibraryToolTypedDict, ], ) -AgentCreationRequestTool = Annotated[ +CreateAgentRequestTool = Annotated[ Union[ CodeInterpreterTool, DocumentLibraryTool, @@ -43,16 +43,16 @@ WebSearchTool, WebSearchPremiumTool, ], - Field(discriminator="TYPE"), + Field(discriminator="type"), ] -class AgentCreationRequestTypedDict(TypedDict): +class CreateAgentRequestTypedDict(TypedDict): model: str name: str instructions: NotRequired[Nullable[str]] r"""Instruction prompt the model will follow during the conversation.""" - tools: NotRequired[List[AgentCreationRequestToolTypedDict]] + tools: NotRequired[List[CreateAgentRequestToolTypedDict]] r"""List of tools which are available to the model during the conversation.""" completion_args: NotRequired[CompletionArgsTypedDict] r"""White-listed arguments from the completion API""" @@ -62,7 +62,7 @@ class 
AgentCreationRequestTypedDict(TypedDict): version_message: NotRequired[Nullable[str]] -class AgentCreationRequest(BaseModel): +class CreateAgentRequest(BaseModel): model: str name: str @@ -70,7 +70,7 @@ class AgentCreationRequest(BaseModel): instructions: OptionalNullable[str] = UNSET r"""Instruction prompt the model will follow during the conversation.""" - tools: Optional[List[AgentCreationRequestTool]] = None + tools: Optional[List[CreateAgentRequestTool]] = None r"""List of tools which are available to the model during the conversation.""" completion_args: Optional[CompletionArgs] = None @@ -86,44 +86,37 @@ class AgentCreationRequest(BaseModel): @model_serializer(mode="wrap") def serialize_model(self, handler): - optional_fields = [ - "instructions", - "tools", - "completion_args", - "description", - "handoffs", - "metadata", - "version_message", - ] - nullable_fields = [ - "instructions", - "description", - "handoffs", - "metadata", - "version_message", - ] - null_default_fields = [] - + optional_fields = set( + [ + "instructions", + "tools", + "completion_args", + "description", + "handoffs", + "metadata", + "version_message", + ] + ) + nullable_fields = set( + ["instructions", "description", "handoffs", "metadata", "version_message"] + ) serialized = handler(self) - m = {} for n, f in type(self).model_fields.items(): k = f.alias or n val = serialized.get(k) - serialized.pop(k, None) - - optional_nullable = k in optional_fields and k in nullable_fields - is_set = ( - self.__pydantic_fields_set__.intersection({n}) - or k in null_default_fields - ) # pylint: disable=no-member - - if val is not None and val != UNSET_SENTINEL: - m[k] = val - elif val != UNSET_SENTINEL and ( - not k in optional_fields or (optional_nullable and is_set) - ): - m[k] = val + is_nullable_and_explicitly_set = ( + k in nullable_fields + and (self.__pydantic_fields_set__.intersection({n})) # pylint: disable=no-member + ) + + if val != UNSET_SENTINEL: + if ( + val is not None + or k not in 
optional_fields + or is_nullable_and_explicitly_set + ): + m[k] = val return m diff --git a/src/mistralai/client/models/batchjobin.py b/src/mistralai/client/models/createbatchjobrequest.py similarity index 76% rename from src/mistralai/client/models/batchjobin.py rename to src/mistralai/client/models/createbatchjobrequest.py index a0c3b914..9a901fef 100644 --- a/src/mistralai/client/models/batchjobin.py +++ b/src/mistralai/client/models/createbatchjobrequest.py @@ -1,5 +1,5 @@ """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" -# @generated-id: 72b25c2038d4 +# @generated-id: 56e24cd24e98 from __future__ import annotations from .apiendpoint import APIEndpoint @@ -16,7 +16,7 @@ from typing_extensions import NotRequired, TypedDict -class BatchJobInTypedDict(TypedDict): +class CreateBatchJobRequestTypedDict(TypedDict): endpoint: APIEndpoint input_files: NotRequired[Nullable[List[str]]] r"""The list of input files to be used for batch inference, these files should be `jsonl` files, containing the input data corresponding to the bory request for the batch inference in a \"body\" field. 
An example of such file is the following: ```json {\"custom_id\": \"0\", \"body\": {\"max_tokens\": 100, \"messages\": [{\"role\": \"user\", \"content\": \"What is the best French cheese?\"}]}} {\"custom_id\": \"1\", \"body\": {\"max_tokens\": 100, \"messages\": [{\"role\": \"user\", \"content\": \"What is the best French wine?\"}]}} ```""" @@ -31,7 +31,7 @@ class BatchJobInTypedDict(TypedDict): r"""The timeout in hours for the batch inference job.""" -class BatchJobIn(BaseModel): +class CreateBatchJobRequest(BaseModel): endpoint: APIEndpoint input_files: OptionalNullable[List[str]] = UNSET @@ -53,37 +53,36 @@ class BatchJobIn(BaseModel): @model_serializer(mode="wrap") def serialize_model(self, handler): - optional_fields = [ - "input_files", - "requests", - "model", - "agent_id", - "metadata", - "timeout_hours", - ] - nullable_fields = ["input_files", "requests", "model", "agent_id", "metadata"] - null_default_fields = [] - + optional_fields = set( + [ + "input_files", + "requests", + "model", + "agent_id", + "metadata", + "timeout_hours", + ] + ) + nullable_fields = set( + ["input_files", "requests", "model", "agent_id", "metadata"] + ) serialized = handler(self) - m = {} for n, f in type(self).model_fields.items(): k = f.alias or n val = serialized.get(k) - serialized.pop(k, None) - - optional_nullable = k in optional_fields and k in nullable_fields - is_set = ( - self.__pydantic_fields_set__.intersection({n}) - or k in null_default_fields - ) # pylint: disable=no-member - - if val is not None and val != UNSET_SENTINEL: - m[k] = val - elif val != UNSET_SENTINEL and ( - not k in optional_fields or (optional_nullable and is_set) - ): - m[k] = val + is_nullable_and_explicitly_set = ( + k in nullable_fields + and (self.__pydantic_fields_set__.intersection({n})) # pylint: disable=no-member + ) + + if val != UNSET_SENTINEL: + if ( + val is not None + or k not in optional_fields + or is_nullable_and_explicitly_set + ): + m[k] = val return m diff --git 
a/src/mistralai/client/models/uploadfileout.py b/src/mistralai/client/models/createfileresponse.py similarity index 69% rename from src/mistralai/client/models/uploadfileout.py rename to src/mistralai/client/models/createfileresponse.py index be291efb..76821280 100644 --- a/src/mistralai/client/models/uploadfileout.py +++ b/src/mistralai/client/models/createfileresponse.py @@ -1,5 +1,5 @@ """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" -# @generated-id: 42466f2bebfb +# @generated-id: fea5e4832dcc from __future__ import annotations from .filepurpose import FilePurpose @@ -17,7 +17,7 @@ from typing_extensions import Annotated, NotRequired, TypedDict -class UploadFileOutTypedDict(TypedDict): +class CreateFileResponseTypedDict(TypedDict): id: str r"""The unique identifier of the file.""" object: str @@ -36,7 +36,7 @@ class UploadFileOutTypedDict(TypedDict): signature: NotRequired[Nullable[str]] -class UploadFileOut(BaseModel): +class CreateFileResponse(BaseModel): id: str r"""The unique identifier of the file.""" @@ -66,30 +66,31 @@ class UploadFileOut(BaseModel): @model_serializer(mode="wrap") def serialize_model(self, handler): - optional_fields = ["num_lines", "mimetype", "signature"] - nullable_fields = ["num_lines", "mimetype", "signature"] - null_default_fields = [] - + optional_fields = set(["num_lines", "mimetype", "signature"]) + nullable_fields = set(["num_lines", "mimetype", "signature"]) serialized = handler(self) - m = {} for n, f in type(self).model_fields.items(): k = f.alias or n val = serialized.get(k) - serialized.pop(k, None) + is_nullable_and_explicitly_set = ( + k in nullable_fields + and (self.__pydantic_fields_set__.intersection({n})) # pylint: disable=no-member + ) + + if val != UNSET_SENTINEL: + if ( + val is not None + or k not in optional_fields + or is_nullable_and_explicitly_set + ): + m[k] = val - optional_nullable = k in optional_fields and k in nullable_fields - is_set = ( - 
self.__pydantic_fields_set__.intersection({n}) - or k in null_default_fields - ) # pylint: disable=no-member + return m - if val is not None and val != UNSET_SENTINEL: - m[k] = val - elif val != UNSET_SENTINEL and ( - not k in optional_fields or (optional_nullable and is_set) - ): - m[k] = val - return m +try: + CreateFileResponse.model_rebuild() +except NameError: + pass diff --git a/src/mistralai/client/models/createfinetuningjobop.py b/src/mistralai/client/models/createfinetuningjobop.py deleted file mode 100644 index f55deef5..00000000 --- a/src/mistralai/client/models/createfinetuningjobop.py +++ /dev/null @@ -1,33 +0,0 @@ -"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" -# @generated-id: fd3c305df250 - -from __future__ import annotations -from .classifierjobout import ClassifierJobOut, ClassifierJobOutTypedDict -from .completionjobout import CompletionJobOut, CompletionJobOutTypedDict -from .legacyjobmetadataout import LegacyJobMetadataOut, LegacyJobMetadataOutTypedDict -from pydantic import Field -from typing import Union -from typing_extensions import Annotated, TypeAliasType - - -ResponseTypedDict = TypeAliasType( - "ResponseTypedDict", Union[ClassifierJobOutTypedDict, CompletionJobOutTypedDict] -) - - -Response = Annotated[ - Union[ClassifierJobOut, CompletionJobOut], Field(discriminator="JOB_TYPE") -] - - -CreateFineTuningJobResponseTypedDict = TypeAliasType( - "CreateFineTuningJobResponseTypedDict", - Union[LegacyJobMetadataOutTypedDict, ResponseTypedDict], -) -r"""OK""" - - -CreateFineTuningJobResponse = TypeAliasType( - "CreateFineTuningJobResponse", Union[LegacyJobMetadataOut, Response] -) -r"""OK""" diff --git a/src/mistralai/client/models/jobin.py b/src/mistralai/client/models/createfinetuningjobrequest.py similarity index 56% rename from src/mistralai/client/models/jobin.py rename to src/mistralai/client/models/createfinetuningjobrequest.py index b3cb8998..e328d944 100644 --- a/src/mistralai/client/models/jobin.py +++ 
b/src/mistralai/client/models/createfinetuningjobrequest.py @@ -1,15 +1,15 @@ """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" -# @generated-id: f4d176123ccc +# @generated-id: c60d2a45d66b from __future__ import annotations -from .classifiertargetin import ClassifierTargetIn, ClassifierTargetInTypedDict -from .classifiertrainingparametersin import ( - ClassifierTrainingParametersIn, - ClassifierTrainingParametersInTypedDict, +from .classifiertarget import ClassifierTarget, ClassifierTargetTypedDict +from .classifiertrainingparameters import ( + ClassifierTrainingParameters, + ClassifierTrainingParametersTypedDict, ) -from .completiontrainingparametersin import ( - CompletionTrainingParametersIn, - CompletionTrainingParametersInTypedDict, +from .completiontrainingparameters import ( + CompletionTrainingParameters, + CompletionTrainingParametersTypedDict, ) from .finetuneablemodeltype import FineTuneableModelType from .githubrepositoryin import GithubRepositoryIn, GithubRepositoryInTypedDict @@ -27,33 +27,30 @@ from typing_extensions import NotRequired, TypeAliasType, TypedDict -JobInIntegrationTypedDict = WandbIntegrationTypedDict +CreateFineTuningJobRequestIntegrationTypedDict = WandbIntegrationTypedDict -JobInIntegration = WandbIntegration +CreateFineTuningJobRequestIntegration = WandbIntegration HyperparametersTypedDict = TypeAliasType( "HyperparametersTypedDict", - Union[ - ClassifierTrainingParametersInTypedDict, CompletionTrainingParametersInTypedDict - ], + Union[ClassifierTrainingParametersTypedDict, CompletionTrainingParametersTypedDict], ) Hyperparameters = TypeAliasType( - "Hyperparameters", - Union[ClassifierTrainingParametersIn, CompletionTrainingParametersIn], + "Hyperparameters", Union[ClassifierTrainingParameters, CompletionTrainingParameters] ) -JobInRepositoryTypedDict = GithubRepositoryInTypedDict +CreateFineTuningJobRequestRepositoryTypedDict = GithubRepositoryInTypedDict -JobInRepository = GithubRepositoryIn 
+CreateFineTuningJobRequestRepository = GithubRepositoryIn -class JobInTypedDict(TypedDict): +class CreateFineTuningJobRequestTypedDict(TypedDict): model: str hyperparameters: HyperparametersTypedDict training_files: NotRequired[List[TrainingFileTypedDict]] @@ -61,17 +58,21 @@ class JobInTypedDict(TypedDict): r"""A list containing the IDs of uploaded files that contain validation data. If you provide these files, the data is used to generate validation metrics periodically during fine-tuning. These metrics can be viewed in `checkpoints` when getting the status of a running fine-tuning job. The same data should not be present in both train and validation files.""" suffix: NotRequired[Nullable[str]] r"""A string that will be added to your fine-tuning model name. For example, a suffix of \"my-great-model\" would produce a model name like `ft:open-mistral-7b:my-great-model:xxx...`""" - integrations: NotRequired[Nullable[List[JobInIntegrationTypedDict]]] + integrations: NotRequired[ + Nullable[List[CreateFineTuningJobRequestIntegrationTypedDict]] + ] r"""A list of integrations to enable for your fine-tuning job.""" auto_start: NotRequired[bool] r"""This field will be required in a future release.""" invalid_sample_skip_percentage: NotRequired[float] job_type: NotRequired[Nullable[FineTuneableModelType]] - repositories: NotRequired[Nullable[List[JobInRepositoryTypedDict]]] - classifier_targets: NotRequired[Nullable[List[ClassifierTargetInTypedDict]]] + repositories: NotRequired[ + Nullable[List[CreateFineTuningJobRequestRepositoryTypedDict]] + ] + classifier_targets: NotRequired[Nullable[List[ClassifierTargetTypedDict]]] -class JobIn(BaseModel): +class CreateFineTuningJobRequest(BaseModel): model: str hyperparameters: Hyperparameters @@ -84,7 +85,7 @@ class JobIn(BaseModel): suffix: OptionalNullable[str] = UNSET r"""A string that will be added to your fine-tuning model name. 
For example, a suffix of \"my-great-model\" would produce a model name like `ft:open-mistral-7b:my-great-model:xxx...`""" - integrations: OptionalNullable[List[JobInIntegration]] = UNSET + integrations: OptionalNullable[List[CreateFineTuningJobRequestIntegration]] = UNSET r"""A list of integrations to enable for your fine-tuning job.""" auto_start: Optional[bool] = None @@ -94,53 +95,52 @@ class JobIn(BaseModel): job_type: OptionalNullable[FineTuneableModelType] = UNSET - repositories: OptionalNullable[List[JobInRepository]] = UNSET + repositories: OptionalNullable[List[CreateFineTuningJobRequestRepository]] = UNSET - classifier_targets: OptionalNullable[List[ClassifierTargetIn]] = UNSET + classifier_targets: OptionalNullable[List[ClassifierTarget]] = UNSET @model_serializer(mode="wrap") def serialize_model(self, handler): - optional_fields = [ - "training_files", - "validation_files", - "suffix", - "integrations", - "auto_start", - "invalid_sample_skip_percentage", - "job_type", - "repositories", - "classifier_targets", - ] - nullable_fields = [ - "validation_files", - "suffix", - "integrations", - "job_type", - "repositories", - "classifier_targets", - ] - null_default_fields = [] - + optional_fields = set( + [ + "training_files", + "validation_files", + "suffix", + "integrations", + "auto_start", + "invalid_sample_skip_percentage", + "job_type", + "repositories", + "classifier_targets", + ] + ) + nullable_fields = set( + [ + "validation_files", + "suffix", + "integrations", + "job_type", + "repositories", + "classifier_targets", + ] + ) serialized = handler(self) - m = {} for n, f in type(self).model_fields.items(): k = f.alias or n val = serialized.get(k) - serialized.pop(k, None) - - optional_nullable = k in optional_fields and k in nullable_fields - is_set = ( - self.__pydantic_fields_set__.intersection({n}) - or k in null_default_fields - ) # pylint: disable=no-member - - if val is not None and val != UNSET_SENTINEL: - m[k] = val - elif val != UNSET_SENTINEL 
and ( - not k in optional_fields or (optional_nullable and is_set) - ): - m[k] = val + is_nullable_and_explicitly_set = ( + k in nullable_fields + and (self.__pydantic_fields_set__.intersection({n})) # pylint: disable=no-member + ) + + if val != UNSET_SENTINEL: + if ( + val is not None + or k not in optional_fields + or is_nullable_and_explicitly_set + ): + m[k] = val return m diff --git a/src/mistralai/client/models/libraryin.py b/src/mistralai/client/models/createlibraryrequest.py similarity index 50% rename from src/mistralai/client/models/libraryin.py rename to src/mistralai/client/models/createlibraryrequest.py index 1a71d410..58874e01 100644 --- a/src/mistralai/client/models/libraryin.py +++ b/src/mistralai/client/models/createlibraryrequest.py @@ -1,5 +1,5 @@ """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" -# @generated-id: 6147d5df71d9 +# @generated-id: 1c489bec2f53 from __future__ import annotations from mistralai.client.types import ( @@ -13,13 +13,13 @@ from typing_extensions import NotRequired, TypedDict -class LibraryInTypedDict(TypedDict): +class CreateLibraryRequestTypedDict(TypedDict): name: str description: NotRequired[Nullable[str]] chunk_size: NotRequired[Nullable[int]] -class LibraryIn(BaseModel): +class CreateLibraryRequest(BaseModel): name: str description: OptionalNullable[str] = UNSET @@ -28,30 +28,25 @@ class LibraryIn(BaseModel): @model_serializer(mode="wrap") def serialize_model(self, handler): - optional_fields = ["description", "chunk_size"] - nullable_fields = ["description", "chunk_size"] - null_default_fields = [] - + optional_fields = set(["description", "chunk_size"]) + nullable_fields = set(["description", "chunk_size"]) serialized = handler(self) - m = {} for n, f in type(self).model_fields.items(): k = f.alias or n val = serialized.get(k) - serialized.pop(k, None) - - optional_nullable = k in optional_fields and k in nullable_fields - is_set = ( - self.__pydantic_fields_set__.intersection({n}) - or k in 
null_default_fields - ) # pylint: disable=no-member - - if val is not None and val != UNSET_SENTINEL: - m[k] = val - elif val != UNSET_SENTINEL and ( - not k in optional_fields or (optional_nullable and is_set) - ): - m[k] = val + is_nullable_and_explicitly_set = ( + k in nullable_fields + and (self.__pydantic_fields_set__.intersection({n})) # pylint: disable=no-member + ) + + if val != UNSET_SENTINEL: + if ( + val is not None + or k not in optional_fields + or is_nullable_and_explicitly_set + ): + m[k] = val return m diff --git a/src/mistralai/client/models/deletemodelop.py b/src/mistralai/client/models/delete_model_v1_models_model_id_deleteop.py similarity index 76% rename from src/mistralai/client/models/deletemodelop.py rename to src/mistralai/client/models/delete_model_v1_models_model_id_deleteop.py index 55c4b242..199614f5 100644 --- a/src/mistralai/client/models/deletemodelop.py +++ b/src/mistralai/client/models/delete_model_v1_models_model_id_deleteop.py @@ -1,5 +1,5 @@ """Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" -# @generated-id: 2c494d99a44d +# @generated-id: 767aba526e43 from __future__ import annotations from mistralai.client.types import BaseModel @@ -7,12 +7,12 @@ from typing_extensions import Annotated, TypedDict -class DeleteModelRequestTypedDict(TypedDict): +class DeleteModelV1ModelsModelIDDeleteRequestTypedDict(TypedDict): model_id: str r"""The ID of the model to delete.""" -class DeleteModelRequest(BaseModel): +class DeleteModelV1ModelsModelIDDeleteRequest(BaseModel): model_id: Annotated[ str, FieldMetadata(path=PathParamMetadata(style="simple", explode=False)) ] diff --git a/src/mistralai/client/models/deletefileout.py b/src/mistralai/client/models/deletefileresponse.py similarity index 82% rename from src/mistralai/client/models/deletefileout.py rename to src/mistralai/client/models/deletefileresponse.py index c721f32c..ffd0e0d0 100644 --- a/src/mistralai/client/models/deletefileout.py +++ b/src/mistralai/client/models/deletefileresponse.py @@ -1,12 +1,12 @@ """Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" -# @generated-id: 5578701e7327 +# @generated-id: 3ee464763a32 from __future__ import annotations from mistralai.client.types import BaseModel from typing_extensions import TypedDict -class DeleteFileOutTypedDict(TypedDict): +class DeleteFileResponseTypedDict(TypedDict): id: str r"""The ID of the deleted file.""" object: str @@ -15,7 +15,7 @@ class DeleteFileOutTypedDict(TypedDict): r"""The deletion status.""" -class DeleteFileOut(BaseModel): +class DeleteFileResponse(BaseModel): id: str r"""The ID of the deleted file.""" diff --git a/src/mistralai/client/models/deletemodelout.py b/src/mistralai/client/models/deletemodelout.py index bf22ed17..fa0c20a4 100644 --- a/src/mistralai/client/models/deletemodelout.py +++ b/src/mistralai/client/models/deletemodelout.py @@ -2,7 +2,8 @@ # @generated-id: ef6a1671c739 from __future__ import annotations -from mistralai.client.types import BaseModel +from mistralai.client.types import BaseModel, UNSET_SENTINEL +from pydantic import model_serializer from typing import Optional from typing_extensions import NotRequired, TypedDict @@ -25,3 +26,19 @@ class DeleteModelOut(BaseModel): deleted: Optional[bool] = True r"""The deletion status""" + + @model_serializer(mode="wrap") + def serialize_model(self, handler): + optional_fields = set(["object", "deleted"]) + serialized = handler(self) + m = {} + + for n, f in type(self).model_fields.items(): + k = f.alias or n + val = serialized.get(k) + + if val != UNSET_SENTINEL: + if val is not None or k not in optional_fields: + m[k] = val + + return m diff --git a/src/mistralai/client/models/deltamessage.py b/src/mistralai/client/models/deltamessage.py index fbb8231a..d9fa230e 100644 --- a/src/mistralai/client/models/deltamessage.py +++ b/src/mistralai/client/models/deltamessage.py @@ -41,30 +41,25 @@ class DeltaMessage(BaseModel): @model_serializer(mode="wrap") def serialize_model(self, handler): - optional_fields = ["role", "content", "tool_calls"] - nullable_fields = ["role", 
"content", "tool_calls"] - null_default_fields = [] - + optional_fields = set(["role", "content", "tool_calls"]) + nullable_fields = set(["role", "content", "tool_calls"]) serialized = handler(self) - m = {} for n, f in type(self).model_fields.items(): k = f.alias or n val = serialized.get(k) - serialized.pop(k, None) - - optional_nullable = k in optional_fields and k in nullable_fields - is_set = ( - self.__pydantic_fields_set__.intersection({n}) - or k in null_default_fields - ) # pylint: disable=no-member - - if val is not None and val != UNSET_SENTINEL: - m[k] = val - elif val != UNSET_SENTINEL and ( - not k in optional_fields or (optional_nullable and is_set) - ): - m[k] = val + is_nullable_and_explicitly_set = ( + k in nullable_fields + and (self.__pydantic_fields_set__.intersection({n})) # pylint: disable=no-member + ) + + if val != UNSET_SENTINEL: + if ( + val is not None + or k not in optional_fields + or is_nullable_and_explicitly_set + ): + m[k] = val return m diff --git a/src/mistralai/client/models/documentout.py b/src/mistralai/client/models/document.py similarity index 60% rename from src/mistralai/client/models/documentout.py rename to src/mistralai/client/models/document.py index 3b1a5713..31eebbd1 100644 --- a/src/mistralai/client/models/documentout.py +++ b/src/mistralai/client/models/document.py @@ -1,5 +1,5 @@ """Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" -# @generated-id: 7a85b9dca506 +# @generated-id: fbbf7428328c from __future__ import annotations from datetime import datetime @@ -15,7 +15,7 @@ from typing_extensions import NotRequired, TypedDict -class DocumentOutTypedDict(TypedDict): +class DocumentTypedDict(TypedDict): id: str library_id: str hash: Nullable[str] @@ -24,9 +24,9 @@ class DocumentOutTypedDict(TypedDict): size: Nullable[int] name: str created_at: datetime - processing_status: str uploaded_by_id: Nullable[str] uploaded_by_type: str + processing_status: str tokens_processing_total: int summary: NotRequired[Nullable[str]] last_processed_at: NotRequired[Nullable[datetime]] @@ -37,7 +37,7 @@ class DocumentOutTypedDict(TypedDict): attributes: NotRequired[Nullable[Dict[str, Any]]] -class DocumentOut(BaseModel): +class Document(BaseModel): id: str library_id: str @@ -54,12 +54,12 @@ class DocumentOut(BaseModel): created_at: datetime - processing_status: str - uploaded_by_id: Nullable[str] uploaded_by_type: str + processing_status: str + tokens_processing_total: int summary: OptionalNullable[str] = UNSET @@ -78,51 +78,50 @@ class DocumentOut(BaseModel): @model_serializer(mode="wrap") def serialize_model(self, handler): - optional_fields = [ - "summary", - "last_processed_at", - "number_of_pages", - "tokens_processing_main_content", - "tokens_processing_summary", - "url", - "attributes", - ] - nullable_fields = [ - "hash", - "mime_type", - "extension", - "size", - "summary", - "last_processed_at", - "number_of_pages", - "uploaded_by_id", - "tokens_processing_main_content", - "tokens_processing_summary", - "url", - "attributes", - ] - null_default_fields = [] - + optional_fields = set( + [ + "summary", + "last_processed_at", + "number_of_pages", + "tokens_processing_main_content", + "tokens_processing_summary", + "url", + "attributes", + ] + ) + nullable_fields = set( + [ + "hash", + "mime_type", + "extension", + "size", + "summary", + "last_processed_at", + "number_of_pages", + 
"uploaded_by_id", + "tokens_processing_main_content", + "tokens_processing_summary", + "url", + "attributes", + ] + ) serialized = handler(self) - m = {} for n, f in type(self).model_fields.items(): k = f.alias or n val = serialized.get(k) - serialized.pop(k, None) - - optional_nullable = k in optional_fields and k in nullable_fields - is_set = ( - self.__pydantic_fields_set__.intersection({n}) - or k in null_default_fields - ) # pylint: disable=no-member - - if val is not None and val != UNSET_SENTINEL: - m[k] = val - elif val != UNSET_SENTINEL and ( - not k in optional_fields or (optional_nullable and is_set) - ): - m[k] = val + is_nullable_and_explicitly_set = ( + k in nullable_fields + and (self.__pydantic_fields_set__.intersection({n})) # pylint: disable=no-member + ) + + if val != UNSET_SENTINEL: + if ( + val is not None + or k not in optional_fields + or is_nullable_and_explicitly_set + ): + m[k] = val return m diff --git a/src/mistralai/client/models/documentlibrarytool.py b/src/mistralai/client/models/documentlibrarytool.py index ff0f7393..642c3202 100644 --- a/src/mistralai/client/models/documentlibrarytool.py +++ b/src/mistralai/client/models/documentlibrarytool.py @@ -2,17 +2,26 @@ # @generated-id: 3eb3c218f457 from __future__ import annotations -from mistralai.client.types import BaseModel +from .toolconfiguration import ToolConfiguration, ToolConfigurationTypedDict +from mistralai.client.types import ( + BaseModel, + Nullable, + OptionalNullable, + UNSET, + UNSET_SENTINEL, +) from mistralai.client.utils import validate_const import pydantic +from pydantic import model_serializer from pydantic.functional_validators import AfterValidator from typing import List, Literal -from typing_extensions import Annotated, TypedDict +from typing_extensions import Annotated, NotRequired, TypedDict class DocumentLibraryToolTypedDict(TypedDict): library_ids: List[str] r"""Ids of the library in which to search.""" + tool_configuration: 
NotRequired[Nullable[ToolConfigurationTypedDict]] type: Literal["document_library"] @@ -20,10 +29,43 @@ class DocumentLibraryTool(BaseModel): library_ids: List[str] r"""Ids of the library in which to search.""" - TYPE: Annotated[ + tool_configuration: OptionalNullable[ToolConfiguration] = UNSET + + type: Annotated[ Annotated[ Literal["document_library"], AfterValidator(validate_const("document_library")), ], pydantic.Field(alias="type"), ] = "document_library" + + @model_serializer(mode="wrap") + def serialize_model(self, handler): + optional_fields = set(["tool_configuration"]) + nullable_fields = set(["tool_configuration"]) + serialized = handler(self) + m = {} + + for n, f in type(self).model_fields.items(): + k = f.alias or n + val = serialized.get(k) + is_nullable_and_explicitly_set = ( + k in nullable_fields + and (self.__pydantic_fields_set__.intersection({n})) # pylint: disable=no-member + ) + + if val != UNSET_SENTINEL: + if ( + val is not None + or k not in optional_fields + or is_nullable_and_explicitly_set + ): + m[k] = val + + return m + + +try: + DocumentLibraryTool.model_rebuild() +except NameError: + pass diff --git a/src/mistralai/client/models/documenturlchunk.py b/src/mistralai/client/models/documenturlchunk.py index 304cde2b..43444d98 100644 --- a/src/mistralai/client/models/documenturlchunk.py +++ b/src/mistralai/client/models/documenturlchunk.py @@ -9,55 +9,62 @@ UNSET, UNSET_SENTINEL, ) +from mistralai.client.utils import validate_const +import pydantic from pydantic import model_serializer +from pydantic.functional_validators import AfterValidator from typing import Literal, Optional -from typing_extensions import NotRequired, TypedDict - - -DocumentURLChunkType = Literal["document_url",] +from typing_extensions import Annotated, NotRequired, TypedDict class DocumentURLChunkTypedDict(TypedDict): document_url: str + type: Literal["document_url"] document_name: NotRequired[Nullable[str]] r"""The filename of the document""" - type: 
NotRequired[DocumentURLChunkType] class DocumentURLChunk(BaseModel): document_url: str + type: Annotated[ + Annotated[ + Optional[Literal["document_url"]], + AfterValidator(validate_const("document_url")), + ], + pydantic.Field(alias="type"), + ] = "document_url" + document_name: OptionalNullable[str] = UNSET r"""The filename of the document""" - type: Optional[DocumentURLChunkType] = "document_url" - @model_serializer(mode="wrap") def serialize_model(self, handler): - optional_fields = ["document_name", "type"] - nullable_fields = ["document_name"] - null_default_fields = [] - + optional_fields = set(["type", "document_name"]) + nullable_fields = set(["document_name"]) serialized = handler(self) - m = {} for n, f in type(self).model_fields.items(): k = f.alias or n val = serialized.get(k) - serialized.pop(k, None) + is_nullable_and_explicitly_set = ( + k in nullable_fields + and (self.__pydantic_fields_set__.intersection({n})) # pylint: disable=no-member + ) + + if val != UNSET_SENTINEL: + if ( + val is not None + or k not in optional_fields + or is_nullable_and_explicitly_set + ): + m[k] = val - optional_nullable = k in optional_fields and k in nullable_fields - is_set = ( - self.__pydantic_fields_set__.intersection({n}) - or k in null_default_fields - ) # pylint: disable=no-member + return m - if val is not None and val != UNSET_SENTINEL: - m[k] = val - elif val != UNSET_SENTINEL and ( - not k in optional_fields or (optional_nullable and is_set) - ): - m[k] = val - return m +try: + DocumentURLChunk.model_rebuild() +except NameError: + pass diff --git a/src/mistralai/client/models/embeddingrequest.py b/src/mistralai/client/models/embeddingrequest.py index f4537ffa..15950590 100644 --- a/src/mistralai/client/models/embeddingrequest.py +++ b/src/mistralai/client/models/embeddingrequest.py @@ -57,35 +57,33 @@ class EmbeddingRequest(BaseModel): @model_serializer(mode="wrap") def serialize_model(self, handler): - optional_fields = [ - "metadata", - "output_dimension", 
- "output_dtype", - "encoding_format", - ] - nullable_fields = ["metadata", "output_dimension"] - null_default_fields = [] - + optional_fields = set( + ["metadata", "output_dimension", "output_dtype", "encoding_format"] + ) + nullable_fields = set(["metadata", "output_dimension"]) serialized = handler(self) - m = {} for n, f in type(self).model_fields.items(): k = f.alias or n val = serialized.get(k) - serialized.pop(k, None) + is_nullable_and_explicitly_set = ( + k in nullable_fields + and (self.__pydantic_fields_set__.intersection({n})) # pylint: disable=no-member + ) + + if val != UNSET_SENTINEL: + if ( + val is not None + or k not in optional_fields + or is_nullable_and_explicitly_set + ): + m[k] = val - optional_nullable = k in optional_fields and k in nullable_fields - is_set = ( - self.__pydantic_fields_set__.intersection({n}) - or k in null_default_fields - ) # pylint: disable=no-member + return m - if val is not None and val != UNSET_SENTINEL: - m[k] = val - elif val != UNSET_SENTINEL and ( - not k in optional_fields or (optional_nullable and is_set) - ): - m[k] = val - return m +try: + EmbeddingRequest.model_rebuild() +except NameError: + pass diff --git a/src/mistralai/client/models/embeddingresponsedata.py b/src/mistralai/client/models/embeddingresponsedata.py index a689b290..098cfae0 100644 --- a/src/mistralai/client/models/embeddingresponsedata.py +++ b/src/mistralai/client/models/embeddingresponsedata.py @@ -2,7 +2,8 @@ # @generated-id: 6d6ead6f3803 from __future__ import annotations -from mistralai.client.types import BaseModel +from mistralai.client.types import BaseModel, UNSET_SENTINEL +from pydantic import model_serializer from typing import List, Optional from typing_extensions import NotRequired, TypedDict @@ -19,3 +20,19 @@ class EmbeddingResponseData(BaseModel): embedding: Optional[List[float]] = None index: Optional[int] = None + + @model_serializer(mode="wrap") + def serialize_model(self, handler): + optional_fields = set(["object", 
"embedding", "index"]) + serialized = handler(self) + m = {} + + for n, f in type(self).model_fields.items(): + k = f.alias or n + val = serialized.get(k) + + if val != UNSET_SENTINEL: + if val is not None or k not in optional_fields: + m[k] = val + + return m diff --git a/src/mistralai/client/models/eventout.py b/src/mistralai/client/models/event.py similarity index 56% rename from src/mistralai/client/models/eventout.py rename to src/mistralai/client/models/event.py index a0247555..c40ae2b1 100644 --- a/src/mistralai/client/models/eventout.py +++ b/src/mistralai/client/models/event.py @@ -1,5 +1,5 @@ """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" -# @generated-id: da8ad645a9cb +# @generated-id: e5a68ac2dd57 from __future__ import annotations from mistralai.client.types import ( @@ -14,7 +14,7 @@ from typing_extensions import NotRequired, TypedDict -class EventOutTypedDict(TypedDict): +class EventTypedDict(TypedDict): name: str r"""The name of the event.""" created_at: int @@ -22,7 +22,7 @@ class EventOutTypedDict(TypedDict): data: NotRequired[Nullable[Dict[str, Any]]] -class EventOut(BaseModel): +class Event(BaseModel): name: str r"""The name of the event.""" @@ -33,30 +33,25 @@ class EventOut(BaseModel): @model_serializer(mode="wrap") def serialize_model(self, handler): - optional_fields = ["data"] - nullable_fields = ["data"] - null_default_fields = [] - + optional_fields = set(["data"]) + nullable_fields = set(["data"]) serialized = handler(self) - m = {} for n, f in type(self).model_fields.items(): k = f.alias or n val = serialized.get(k) - serialized.pop(k, None) - - optional_nullable = k in optional_fields and k in nullable_fields - is_set = ( - self.__pydantic_fields_set__.intersection({n}) - or k in null_default_fields - ) # pylint: disable=no-member - - if val is not None and val != UNSET_SENTINEL: - m[k] = val - elif val != UNSET_SENTINEL and ( - not k in optional_fields or (optional_nullable and is_set) - ): - m[k] = val + 
is_nullable_and_explicitly_set = ( + k in nullable_fields + and (self.__pydantic_fields_set__.intersection({n})) # pylint: disable=no-member + ) + + if val != UNSET_SENTINEL: + if ( + val is not None + or k not in optional_fields + or is_nullable_and_explicitly_set + ): + m[k] = val return m diff --git a/src/mistralai/client/models/file.py b/src/mistralai/client/models/file.py index dbbc00b5..1b0ea1d4 100644 --- a/src/mistralai/client/models/file.py +++ b/src/mistralai/client/models/file.py @@ -3,9 +3,10 @@ from __future__ import annotations import io -from mistralai.client.types import BaseModel +from mistralai.client.types import BaseModel, UNSET_SENTINEL from mistralai.client.utils import FieldMetadata, MultipartFormMetadata import pydantic +from pydantic import model_serializer from typing import IO, Optional, Union from typing_extensions import Annotated, NotRequired, TypedDict @@ -32,3 +33,19 @@ class File(BaseModel): pydantic.Field(alias="Content-Type"), FieldMetadata(multipart=True), ] = None + + @model_serializer(mode="wrap") + def serialize_model(self, handler): + optional_fields = set(["contentType"]) + serialized = handler(self) + m = {} + + for n, f in type(self).model_fields.items(): + k = f.alias or n + val = serialized.get(k) + + if val != UNSET_SENTINEL: + if val is not None or k not in optional_fields: + m[k] = val + + return m diff --git a/src/mistralai/client/models/filechunk.py b/src/mistralai/client/models/filechunk.py index 43ef22f8..5c8d2646 100644 --- a/src/mistralai/client/models/filechunk.py +++ b/src/mistralai/client/models/filechunk.py @@ -2,9 +2,10 @@ # @generated-id: ff3c2d33ab1e from __future__ import annotations -from mistralai.client.types import BaseModel +from mistralai.client.types import BaseModel, UNSET_SENTINEL from mistralai.client.utils import validate_const import pydantic +from pydantic import model_serializer from pydantic.functional_validators import AfterValidator from typing import Literal, Optional from 
typing_extensions import Annotated, TypedDict @@ -18,7 +19,29 @@ class FileChunkTypedDict(TypedDict): class FileChunk(BaseModel): file_id: str - TYPE: Annotated[ + type: Annotated[ Annotated[Optional[Literal["file"]], AfterValidator(validate_const("file"))], pydantic.Field(alias="type"), ] = "file" + + @model_serializer(mode="wrap") + def serialize_model(self, handler): + optional_fields = set(["type"]) + serialized = handler(self) + m = {} + + for n, f in type(self).model_fields.items(): + k = f.alias or n + val = serialized.get(k) + + if val != UNSET_SENTINEL: + if val is not None or k not in optional_fields: + m[k] = val + + return m + + +try: + FileChunk.model_rebuild() +except NameError: + pass diff --git a/src/mistralai/client/models/downloadfileop.py b/src/mistralai/client/models/files_api_routes_delete_fileop.py similarity index 74% rename from src/mistralai/client/models/downloadfileop.py rename to src/mistralai/client/models/files_api_routes_delete_fileop.py index fcdc01d6..eaba274b 100644 --- a/src/mistralai/client/models/downloadfileop.py +++ b/src/mistralai/client/models/files_api_routes_delete_fileop.py @@ -1,5 +1,5 @@ """Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" -# @generated-id: 4d051f08057d +# @generated-id: 2f385cc6138f from __future__ import annotations from mistralai.client.types import BaseModel @@ -7,11 +7,11 @@ from typing_extensions import Annotated, TypedDict -class DownloadFileRequestTypedDict(TypedDict): +class FilesAPIRoutesDeleteFileRequestTypedDict(TypedDict): file_id: str -class DownloadFileRequest(BaseModel): +class FilesAPIRoutesDeleteFileRequest(BaseModel): file_id: Annotated[ str, FieldMetadata(path=PathParamMetadata(style="simple", explode=False)) ] diff --git a/src/mistralai/client/models/deletefileop.py b/src/mistralai/client/models/files_api_routes_download_fileop.py similarity index 73% rename from src/mistralai/client/models/deletefileop.py rename to src/mistralai/client/models/files_api_routes_download_fileop.py index 4feb7812..83de8e73 100644 --- a/src/mistralai/client/models/deletefileop.py +++ b/src/mistralai/client/models/files_api_routes_download_fileop.py @@ -1,5 +1,5 @@ """Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" -# @generated-id: 286b4e583638 +# @generated-id: 8184ee3577c3 from __future__ import annotations from mistralai.client.types import BaseModel @@ -7,11 +7,11 @@ from typing_extensions import Annotated, TypedDict -class DeleteFileRequestTypedDict(TypedDict): +class FilesAPIRoutesDownloadFileRequestTypedDict(TypedDict): file_id: str -class DeleteFileRequest(BaseModel): +class FilesAPIRoutesDownloadFileRequest(BaseModel): file_id: Annotated[ str, FieldMetadata(path=PathParamMetadata(style="simple", explode=False)) ] diff --git a/src/mistralai/client/models/getfilesignedurlop.py b/src/mistralai/client/models/files_api_routes_get_signed_urlop.py similarity index 51% rename from src/mistralai/client/models/getfilesignedurlop.py rename to src/mistralai/client/models/files_api_routes_get_signed_urlop.py index 06ed79ee..64cd6ac5 100644 --- a/src/mistralai/client/models/getfilesignedurlop.py +++ b/src/mistralai/client/models/files_api_routes_get_signed_urlop.py @@ -1,20 +1,21 @@ """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" -# @generated-id: 1aa50b81c8cf +# @generated-id: 0a1a18c6431e from __future__ import annotations -from mistralai.client.types import BaseModel +from mistralai.client.types import BaseModel, UNSET_SENTINEL from mistralai.client.utils import FieldMetadata, PathParamMetadata, QueryParamMetadata +from pydantic import model_serializer from typing import Optional from typing_extensions import Annotated, NotRequired, TypedDict -class GetFileSignedURLRequestTypedDict(TypedDict): +class FilesAPIRoutesGetSignedURLRequestTypedDict(TypedDict): file_id: str expiry: NotRequired[int] r"""Number of hours before the url becomes invalid. 
Defaults to 24h""" -class GetFileSignedURLRequest(BaseModel): +class FilesAPIRoutesGetSignedURLRequest(BaseModel): file_id: Annotated[ str, FieldMetadata(path=PathParamMetadata(style="simple", explode=False)) ] @@ -24,3 +25,19 @@ class GetFileSignedURLRequest(BaseModel): FieldMetadata(query=QueryParamMetadata(style="form", explode=True)), ] = 24 r"""Number of hours before the url becomes invalid. Defaults to 24h""" + + @model_serializer(mode="wrap") + def serialize_model(self, handler): + optional_fields = set(["expiry"]) + serialized = handler(self) + m = {} + + for n, f in type(self).model_fields.items(): + k = f.alias or n + val = serialized.get(k) + + if val != UNSET_SENTINEL: + if val is not None or k not in optional_fields: + m[k] = val + + return m diff --git a/src/mistralai/client/models/listfilesop.py b/src/mistralai/client/models/files_api_routes_list_filesop.py similarity index 70% rename from src/mistralai/client/models/listfilesop.py rename to src/mistralai/client/models/files_api_routes_list_filesop.py index a9af5c70..b03e2f88 100644 --- a/src/mistralai/client/models/listfilesop.py +++ b/src/mistralai/client/models/files_api_routes_list_filesop.py @@ -1,5 +1,5 @@ """Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" -# @generated-id: e5bd46ac0145 +# @generated-id: b2e92f2a29b4 from __future__ import annotations from .filepurpose import FilePurpose @@ -18,7 +18,7 @@ from typing_extensions import Annotated, NotRequired, TypedDict -class ListFilesRequestTypedDict(TypedDict): +class FilesAPIRoutesListFilesRequestTypedDict(TypedDict): page: NotRequired[int] page_size: NotRequired[int] include_total: NotRequired[bool] @@ -29,7 +29,7 @@ class ListFilesRequestTypedDict(TypedDict): mimetypes: NotRequired[Nullable[List[str]]] -class ListFilesRequest(BaseModel): +class FilesAPIRoutesListFilesRequest(BaseModel): page: Annotated[ Optional[int], FieldMetadata(query=QueryParamMetadata(style="form", explode=True)), @@ -72,39 +72,38 @@ class ListFilesRequest(BaseModel): @model_serializer(mode="wrap") def serialize_model(self, handler): - optional_fields = [ - "page", - "page_size", - "include_total", - "sample_type", - "source", - "search", - "purpose", - "mimetypes", - ] - nullable_fields = ["sample_type", "source", "search", "purpose", "mimetypes"] - null_default_fields = [] - + optional_fields = set( + [ + "page", + "page_size", + "include_total", + "sample_type", + "source", + "search", + "purpose", + "mimetypes", + ] + ) + nullable_fields = set( + ["sample_type", "source", "search", "purpose", "mimetypes"] + ) serialized = handler(self) - m = {} for n, f in type(self).model_fields.items(): k = f.alias or n val = serialized.get(k) - serialized.pop(k, None) - - optional_nullable = k in optional_fields and k in nullable_fields - is_set = ( - self.__pydantic_fields_set__.intersection({n}) - or k in null_default_fields - ) # pylint: disable=no-member - - if val is not None and val != UNSET_SENTINEL: - m[k] = val - elif val != UNSET_SENTINEL and ( - not k in optional_fields or (optional_nullable and is_set) - ): - m[k] = val + is_nullable_and_explicitly_set = ( + k in nullable_fields + and (self.__pydantic_fields_set__.intersection({n})) # pylint: disable=no-member + ) + + if 
val != UNSET_SENTINEL: + if ( + val is not None + or k not in optional_fields + or is_nullable_and_explicitly_set + ): + m[k] = val return m diff --git a/src/mistralai/client/models/retrievefileop.py b/src/mistralai/client/models/files_api_routes_retrieve_fileop.py similarity index 73% rename from src/mistralai/client/models/retrievefileop.py rename to src/mistralai/client/models/files_api_routes_retrieve_fileop.py index edd50e57..5f8de05f 100644 --- a/src/mistralai/client/models/retrievefileop.py +++ b/src/mistralai/client/models/files_api_routes_retrieve_fileop.py @@ -1,5 +1,5 @@ """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" -# @generated-id: ee73efdf9180 +# @generated-id: 5d5dbb8d5f7a from __future__ import annotations from mistralai.client.types import BaseModel @@ -7,11 +7,11 @@ from typing_extensions import Annotated, TypedDict -class RetrieveFileRequestTypedDict(TypedDict): +class FilesAPIRoutesRetrieveFileRequestTypedDict(TypedDict): file_id: str -class RetrieveFileRequest(BaseModel): +class FilesAPIRoutesRetrieveFileRequest(BaseModel): file_id: Annotated[ str, FieldMetadata(path=PathParamMetadata(style="simple", explode=False)) ] diff --git a/src/mistralai/client/models/uploadfileop.py b/src/mistralai/client/models/files_api_routes_upload_fileop.py similarity index 70% rename from src/mistralai/client/models/uploadfileop.py rename to src/mistralai/client/models/files_api_routes_upload_fileop.py index 50848f0b..54ff4e49 100644 --- a/src/mistralai/client/models/uploadfileop.py +++ b/src/mistralai/client/models/files_api_routes_upload_fileop.py @@ -1,11 +1,12 @@ """Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" -# @generated-id: d67619670938 +# @generated-id: f13b84de6fa7 from __future__ import annotations from .file import File, FileTypedDict from .filepurpose import FilePurpose -from mistralai.client.types import BaseModel +from mistralai.client.types import BaseModel, UNSET_SENTINEL from mistralai.client.utils import FieldMetadata, MultipartFormMetadata +from pydantic import model_serializer from typing import Optional from typing_extensions import Annotated, NotRequired, TypedDict @@ -39,3 +40,19 @@ class MultiPartBodyParams(BaseModel): """ purpose: Annotated[Optional[FilePurpose], FieldMetadata(multipart=True)] = None + + @model_serializer(mode="wrap") + def serialize_model(self, handler): + optional_fields = set(["purpose"]) + serialized = handler(self) + m = {} + + for n, f in type(self).model_fields.items(): + k = f.alias or n + val = serialized.get(k) + + if val != UNSET_SENTINEL: + if val is not None or k not in optional_fields: + m[k] = val + + return m diff --git a/src/mistralai/client/models/fileschema.py b/src/mistralai/client/models/fileschema.py index cbe9b0d1..e99066a9 100644 --- a/src/mistralai/client/models/fileschema.py +++ b/src/mistralai/client/models/fileschema.py @@ -66,30 +66,31 @@ class FileSchema(BaseModel): @model_serializer(mode="wrap") def serialize_model(self, handler): - optional_fields = ["num_lines", "mimetype", "signature"] - nullable_fields = ["num_lines", "mimetype", "signature"] - null_default_fields = [] - + optional_fields = set(["num_lines", "mimetype", "signature"]) + nullable_fields = set(["num_lines", "mimetype", "signature"]) serialized = handler(self) - m = {} for n, f in type(self).model_fields.items(): k = f.alias or n val = serialized.get(k) - serialized.pop(k, None) + is_nullable_and_explicitly_set = ( + k in nullable_fields + and (self.__pydantic_fields_set__.intersection({n})) # pylint: disable=no-member + ) + + if val != UNSET_SENTINEL: + if ( + val is not None + or k not in optional_fields + or 
is_nullable_and_explicitly_set + ): + m[k] = val - optional_nullable = k in optional_fields and k in nullable_fields - is_set = ( - self.__pydantic_fields_set__.intersection({n}) - or k in null_default_fields - ) # pylint: disable=no-member + return m - if val is not None and val != UNSET_SENTINEL: - m[k] = val - elif val != UNSET_SENTINEL and ( - not k in optional_fields or (optional_nullable and is_set) - ): - m[k] = val - return m +try: + FileSchema.model_rebuild() +except NameError: + pass diff --git a/src/mistralai/client/models/fimcompletionrequest.py b/src/mistralai/client/models/fimcompletionrequest.py index e2f60327..ea877213 100644 --- a/src/mistralai/client/models/fimcompletionrequest.py +++ b/src/mistralai/client/models/fimcompletionrequest.py @@ -85,47 +85,46 @@ class FIMCompletionRequest(BaseModel): @model_serializer(mode="wrap") def serialize_model(self, handler): - optional_fields = [ - "temperature", - "top_p", - "max_tokens", - "stream", - "stop", - "random_seed", - "metadata", - "suffix", - "min_tokens", - ] - nullable_fields = [ - "temperature", - "max_tokens", - "random_seed", - "metadata", - "suffix", - "min_tokens", - ] - null_default_fields = [] - + optional_fields = set( + [ + "temperature", + "top_p", + "max_tokens", + "stream", + "stop", + "random_seed", + "metadata", + "suffix", + "min_tokens", + ] + ) + nullable_fields = set( + [ + "temperature", + "max_tokens", + "random_seed", + "metadata", + "suffix", + "min_tokens", + ] + ) serialized = handler(self) - m = {} for n, f in type(self).model_fields.items(): k = f.alias or n val = serialized.get(k) - serialized.pop(k, None) - - optional_nullable = k in optional_fields and k in nullable_fields - is_set = ( - self.__pydantic_fields_set__.intersection({n}) - or k in null_default_fields - ) # pylint: disable=no-member - - if val is not None and val != UNSET_SENTINEL: - m[k] = val - elif val != UNSET_SENTINEL and ( - not k in optional_fields or (optional_nullable and is_set) - ): - m[k] = val 
+ is_nullable_and_explicitly_set = ( + k in nullable_fields + and (self.__pydantic_fields_set__.intersection({n})) # pylint: disable=no-member + ) + + if val != UNSET_SENTINEL: + if ( + val is not None + or k not in optional_fields + or is_nullable_and_explicitly_set + ): + m[k] = val return m diff --git a/src/mistralai/client/models/fimcompletionstreamrequest.py b/src/mistralai/client/models/fimcompletionstreamrequest.py index 480ed17a..e80efc09 100644 --- a/src/mistralai/client/models/fimcompletionstreamrequest.py +++ b/src/mistralai/client/models/fimcompletionstreamrequest.py @@ -83,47 +83,46 @@ class FIMCompletionStreamRequest(BaseModel): @model_serializer(mode="wrap") def serialize_model(self, handler): - optional_fields = [ - "temperature", - "top_p", - "max_tokens", - "stream", - "stop", - "random_seed", - "metadata", - "suffix", - "min_tokens", - ] - nullable_fields = [ - "temperature", - "max_tokens", - "random_seed", - "metadata", - "suffix", - "min_tokens", - ] - null_default_fields = [] - + optional_fields = set( + [ + "temperature", + "top_p", + "max_tokens", + "stream", + "stop", + "random_seed", + "metadata", + "suffix", + "min_tokens", + ] + ) + nullable_fields = set( + [ + "temperature", + "max_tokens", + "random_seed", + "metadata", + "suffix", + "min_tokens", + ] + ) serialized = handler(self) - m = {} for n, f in type(self).model_fields.items(): k = f.alias or n val = serialized.get(k) - serialized.pop(k, None) - - optional_nullable = k in optional_fields and k in nullable_fields - is_set = ( - self.__pydantic_fields_set__.intersection({n}) - or k in null_default_fields - ) # pylint: disable=no-member - - if val is not None and val != UNSET_SENTINEL: - m[k] = val - elif val != UNSET_SENTINEL and ( - not k in optional_fields or (optional_nullable and is_set) - ): - m[k] = val + is_nullable_and_explicitly_set = ( + k in nullable_fields + and (self.__pydantic_fields_set__.intersection({n})) # pylint: disable=no-member + ) + + if val != 
UNSET_SENTINEL: + if ( + val is not None + or k not in optional_fields + or is_nullable_and_explicitly_set + ): + m[k] = val return m diff --git a/src/mistralai/client/models/finetunedmodelcapabilities.py b/src/mistralai/client/models/finetunedmodelcapabilities.py new file mode 100644 index 00000000..2f4cca0b --- /dev/null +++ b/src/mistralai/client/models/finetunedmodelcapabilities.py @@ -0,0 +1,52 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" +# @generated-id: 475c805eab95 + +from __future__ import annotations +from mistralai.client.types import BaseModel, UNSET_SENTINEL +from pydantic import model_serializer +from typing import Optional +from typing_extensions import NotRequired, TypedDict + + +class FineTunedModelCapabilitiesTypedDict(TypedDict): + completion_chat: NotRequired[bool] + completion_fim: NotRequired[bool] + function_calling: NotRequired[bool] + fine_tuning: NotRequired[bool] + classification: NotRequired[bool] + + +class FineTunedModelCapabilities(BaseModel): + completion_chat: Optional[bool] = True + + completion_fim: Optional[bool] = False + + function_calling: Optional[bool] = False + + fine_tuning: Optional[bool] = False + + classification: Optional[bool] = False + + @model_serializer(mode="wrap") + def serialize_model(self, handler): + optional_fields = set( + [ + "completion_chat", + "completion_fim", + "function_calling", + "fine_tuning", + "classification", + ] + ) + serialized = handler(self) + m = {} + + for n, f in type(self).model_fields.items(): + k = f.alias or n + val = serialized.get(k) + + if val != UNSET_SENTINEL: + if val is not None or k not in optional_fields: + m[k] = val + + return m diff --git a/src/mistralai/client/models/ftmodelcapabilitiesout.py b/src/mistralai/client/models/ftmodelcapabilitiesout.py deleted file mode 100644 index 42269b78..00000000 --- a/src/mistralai/client/models/ftmodelcapabilitiesout.py +++ /dev/null @@ -1,27 +0,0 @@ -"""Code generated by Speakeasy 
(https://speakeasy.com). DO NOT EDIT.""" -# @generated-id: f70517be97d4 - -from __future__ import annotations -from mistralai.client.types import BaseModel -from typing import Optional -from typing_extensions import NotRequired, TypedDict - - -class FTModelCapabilitiesOutTypedDict(TypedDict): - completion_chat: NotRequired[bool] - completion_fim: NotRequired[bool] - function_calling: NotRequired[bool] - fine_tuning: NotRequired[bool] - classification: NotRequired[bool] - - -class FTModelCapabilitiesOut(BaseModel): - completion_chat: Optional[bool] = True - - completion_fim: Optional[bool] = False - - function_calling: Optional[bool] = False - - fine_tuning: Optional[bool] = False - - classification: Optional[bool] = False diff --git a/src/mistralai/client/models/ftmodelcard.py b/src/mistralai/client/models/ftmodelcard.py index 570e95e2..2c26ff2f 100644 --- a/src/mistralai/client/models/ftmodelcard.py +++ b/src/mistralai/client/models/ftmodelcard.py @@ -71,7 +71,7 @@ class FTModelCard(BaseModel): default_model_temperature: OptionalNullable[float] = UNSET - TYPE: Annotated[ + type: Annotated[ Annotated[Literal["fine-tuned"], AfterValidator(validate_const("fine-tuned"))], pydantic.Field(alias="type"), ] = "fine-tuned" @@ -80,48 +80,53 @@ class FTModelCard(BaseModel): @model_serializer(mode="wrap") def serialize_model(self, handler): - optional_fields = [ - "object", - "created", - "owned_by", - "name", - "description", - "max_context_length", - "aliases", - "deprecation", - "deprecation_replacement_model", - "default_model_temperature", - "archived", - ] - nullable_fields = [ - "name", - "description", - "deprecation", - "deprecation_replacement_model", - "default_model_temperature", - ] - null_default_fields = [] - + optional_fields = set( + [ + "object", + "created", + "owned_by", + "name", + "description", + "max_context_length", + "aliases", + "deprecation", + "deprecation_replacement_model", + "default_model_temperature", + "archived", + ] + ) + nullable_fields = 
set( + [ + "name", + "description", + "deprecation", + "deprecation_replacement_model", + "default_model_temperature", + ] + ) serialized = handler(self) - m = {} for n, f in type(self).model_fields.items(): k = f.alias or n val = serialized.get(k) - serialized.pop(k, None) + is_nullable_and_explicitly_set = ( + k in nullable_fields + and (self.__pydantic_fields_set__.intersection({n})) # pylint: disable=no-member + ) + + if val != UNSET_SENTINEL: + if ( + val is not None + or k not in optional_fields + or is_nullable_and_explicitly_set + ): + m[k] = val - optional_nullable = k in optional_fields and k in nullable_fields - is_set = ( - self.__pydantic_fields_set__.intersection({n}) - or k in null_default_fields - ) # pylint: disable=no-member + return m - if val is not None and val != UNSET_SENTINEL: - m[k] = val - elif val != UNSET_SENTINEL and ( - not k in optional_fields or (optional_nullable and is_set) - ): - m[k] = val - return m +try: + FTModelCard.model_rebuild() +except NameError: + pass diff --git a/src/mistralai/client/models/function.py b/src/mistralai/client/models/function.py index 3632c1af..1da1dcc9 100644 --- a/src/mistralai/client/models/function.py +++ b/src/mistralai/client/models/function.py @@ -2,7 +2,8 @@ # @generated-id: 32275a9d8fee from __future__ import annotations -from mistralai.client.types import BaseModel +from mistralai.client.types import BaseModel, UNSET_SENTINEL +from pydantic import model_serializer from typing import Any, Dict, Optional from typing_extensions import NotRequired, TypedDict @@ -22,3 +23,19 @@ class Function(BaseModel): description: Optional[str] = None strict: Optional[bool] = None + + @model_serializer(mode="wrap") + def serialize_model(self, handler): + optional_fields = set(["description", "strict"]) + serialized = handler(self) + m = {} + + for n, f in type(self).model_fields.items(): + k = f.alias or n + val = serialized.get(k) + + if val != UNSET_SENTINEL: + if val is not None or k not in optional_fields: + 
m[k] = val + + return m diff --git a/src/mistralai/client/models/functioncallentry.py b/src/mistralai/client/models/functioncallentry.py index 6ada1d35..d05fad85 100644 --- a/src/mistralai/client/models/functioncallentry.py +++ b/src/mistralai/client/models/functioncallentry.py @@ -13,27 +13,38 @@ OptionalNullable, UNSET, UNSET_SENTINEL, + UnrecognizedStr, ) +from mistralai.client.utils import validate_const +import pydantic from pydantic import model_serializer -from typing import Literal, Optional -from typing_extensions import NotRequired, TypedDict +from pydantic.functional_validators import AfterValidator +from typing import Literal, Optional, Union +from typing_extensions import Annotated, NotRequired, TypedDict -FunctionCallEntryObject = Literal["entry",] - - -FunctionCallEntryType = Literal["function.call",] +FunctionCallEntryConfirmationStatus = Union[ + Literal[ + "pending", + "allowed", + "denied", + ], + UnrecognizedStr, +] class FunctionCallEntryTypedDict(TypedDict): tool_call_id: str name: str arguments: FunctionCallEntryArgumentsTypedDict - object: NotRequired[FunctionCallEntryObject] - type: NotRequired[FunctionCallEntryType] + object: Literal["entry"] + type: Literal["function.call"] created_at: NotRequired[datetime] completed_at: NotRequired[Nullable[datetime]] + agent_id: NotRequired[Nullable[str]] + model: NotRequired[Nullable[str]] id: NotRequired[str] + confirmation_status: NotRequired[Nullable[FunctionCallEntryConfirmationStatus]] class FunctionCallEntry(BaseModel): @@ -43,42 +54,71 @@ class FunctionCallEntry(BaseModel): arguments: FunctionCallEntryArguments - object: Optional[FunctionCallEntryObject] = "entry" + object: Annotated[ + Annotated[Optional[Literal["entry"]], AfterValidator(validate_const("entry"))], + pydantic.Field(alias="object"), + ] = "entry" - type: Optional[FunctionCallEntryType] = "function.call" + type: Annotated[ + Annotated[ + Optional[Literal["function.call"]], + AfterValidator(validate_const("function.call")), + ], + 
pydantic.Field(alias="type"), + ] = "function.call" created_at: Optional[datetime] = None completed_at: OptionalNullable[datetime] = UNSET + agent_id: OptionalNullable[str] = UNSET + + model: OptionalNullable[str] = UNSET + id: Optional[str] = None + confirmation_status: OptionalNullable[FunctionCallEntryConfirmationStatus] = UNSET + @model_serializer(mode="wrap") def serialize_model(self, handler): - optional_fields = ["object", "type", "created_at", "completed_at", "id"] - nullable_fields = ["completed_at"] - null_default_fields = [] - + optional_fields = set( + [ + "object", + "type", + "created_at", + "completed_at", + "agent_id", + "model", + "id", + "confirmation_status", + ] + ) + nullable_fields = set( + ["completed_at", "agent_id", "model", "confirmation_status"] + ) serialized = handler(self) - m = {} for n, f in type(self).model_fields.items(): k = f.alias or n val = serialized.get(k) - serialized.pop(k, None) + is_nullable_and_explicitly_set = ( + k in nullable_fields + and (self.__pydantic_fields_set__.intersection({n})) # pylint: disable=no-member + ) + + if val != UNSET_SENTINEL: + if ( + val is not None + or k not in optional_fields + or is_nullable_and_explicitly_set + ): + m[k] = val - optional_nullable = k in optional_fields and k in nullable_fields - is_set = ( - self.__pydantic_fields_set__.intersection({n}) - or k in null_default_fields - ) # pylint: disable=no-member + return m - if val is not None and val != UNSET_SENTINEL: - m[k] = val - elif val != UNSET_SENTINEL and ( - not k in optional_fields or (optional_nullable and is_set) - ): - m[k] = val - return m +try: + FunctionCallEntry.model_rebuild() +except NameError: + pass diff --git a/src/mistralai/client/models/functioncallevent.py b/src/mistralai/client/models/functioncallevent.py index 5d871a0e..849eed76 100644 --- a/src/mistralai/client/models/functioncallevent.py +++ b/src/mistralai/client/models/functioncallevent.py @@ -3,14 +3,32 @@ from __future__ import annotations from datetime 
import datetime -from mistralai.client.types import BaseModel +from mistralai.client.types import ( + BaseModel, + Nullable, + OptionalNullable, + UNSET, + UNSET_SENTINEL, + UnrecognizedStr, +) from mistralai.client.utils import validate_const import pydantic +from pydantic import model_serializer from pydantic.functional_validators import AfterValidator -from typing import Literal, Optional +from typing import Literal, Optional, Union from typing_extensions import Annotated, NotRequired, TypedDict +FunctionCallEventConfirmationStatus = Union[ + Literal[ + "pending", + "allowed", + "denied", + ], + UnrecognizedStr, +] + + class FunctionCallEventTypedDict(TypedDict): id: str name: str @@ -19,6 +37,9 @@ class FunctionCallEventTypedDict(TypedDict): type: Literal["function.call.delta"] created_at: NotRequired[datetime] output_index: NotRequired[int] + model: NotRequired[Nullable[str]] + agent_id: NotRequired[Nullable[str]] + confirmation_status: NotRequired[Nullable[FunctionCallEventConfirmationStatus]] class FunctionCallEvent(BaseModel): @@ -30,7 +51,7 @@ class FunctionCallEvent(BaseModel): arguments: str - TYPE: Annotated[ + type: Annotated[ Annotated[ Literal["function.call.delta"], AfterValidator(validate_const("function.call.delta")), @@ -41,3 +62,42 @@ class FunctionCallEvent(BaseModel): created_at: Optional[datetime] = None output_index: Optional[int] = 0 + + model: OptionalNullable[str] = UNSET + + agent_id: OptionalNullable[str] = UNSET + + confirmation_status: OptionalNullable[FunctionCallEventConfirmationStatus] = UNSET + + @model_serializer(mode="wrap") + def serialize_model(self, handler): + optional_fields = set( + ["created_at", "output_index", "model", "agent_id", "confirmation_status"] + ) + nullable_fields = set(["model", "agent_id", "confirmation_status"]) + serialized = handler(self) + m = {} + + for n, f in type(self).model_fields.items(): + k = f.alias or n + val = serialized.get(k) + is_nullable_and_explicitly_set = ( + k in nullable_fields + and 
(self.__pydantic_fields_set__.intersection({n})) # pylint: disable=no-member + ) + + if val != UNSET_SENTINEL: + if ( + val is not None + or k not in optional_fields + or is_nullable_and_explicitly_set + ): + m[k] = val + + return m + + +try: + FunctionCallEvent.model_rebuild() +except NameError: + pass diff --git a/src/mistralai/client/models/functionresultentry.py b/src/mistralai/client/models/functionresultentry.py index ca73cbb7..01e2e36f 100644 --- a/src/mistralai/client/models/functionresultentry.py +++ b/src/mistralai/client/models/functionresultentry.py @@ -10,22 +10,19 @@ UNSET, UNSET_SENTINEL, ) +from mistralai.client.utils import validate_const +import pydantic from pydantic import model_serializer +from pydantic.functional_validators import AfterValidator from typing import Literal, Optional -from typing_extensions import NotRequired, TypedDict - - -FunctionResultEntryObject = Literal["entry",] - - -FunctionResultEntryType = Literal["function.result",] +from typing_extensions import Annotated, NotRequired, TypedDict class FunctionResultEntryTypedDict(TypedDict): tool_call_id: str result: str - object: NotRequired[FunctionResultEntryObject] - type: NotRequired[FunctionResultEntryType] + object: Literal["entry"] + type: Literal["function.result"] created_at: NotRequired[datetime] completed_at: NotRequired[Nullable[datetime]] id: NotRequired[str] @@ -36,9 +33,18 @@ class FunctionResultEntry(BaseModel): result: str - object: Optional[FunctionResultEntryObject] = "entry" + object: Annotated[ + Annotated[Optional[Literal["entry"]], AfterValidator(validate_const("entry"))], + pydantic.Field(alias="object"), + ] = "entry" - type: Optional[FunctionResultEntryType] = "function.result" + type: Annotated[ + Annotated[ + Optional[Literal["function.result"]], + AfterValidator(validate_const("function.result")), + ], + pydantic.Field(alias="type"), + ] = "function.result" created_at: Optional[datetime] = None @@ -48,30 +54,31 @@ class FunctionResultEntry(BaseModel): 
@model_serializer(mode="wrap") def serialize_model(self, handler): - optional_fields = ["object", "type", "created_at", "completed_at", "id"] - nullable_fields = ["completed_at"] - null_default_fields = [] - + optional_fields = set(["object", "type", "created_at", "completed_at", "id"]) + nullable_fields = set(["completed_at"]) serialized = handler(self) - m = {} for n, f in type(self).model_fields.items(): k = f.alias or n val = serialized.get(k) - serialized.pop(k, None) + is_nullable_and_explicitly_set = ( + k in nullable_fields + and (self.__pydantic_fields_set__.intersection({n})) # pylint: disable=no-member + ) + + if val != UNSET_SENTINEL: + if ( + val is not None + or k not in optional_fields + or is_nullable_and_explicitly_set + ): + m[k] = val - optional_nullable = k in optional_fields and k in nullable_fields - is_set = ( - self.__pydantic_fields_set__.intersection({n}) - or k in null_default_fields - ) # pylint: disable=no-member + return m - if val is not None and val != UNSET_SENTINEL: - m[k] = val - elif val != UNSET_SENTINEL and ( - not k in optional_fields or (optional_nullable and is_set) - ): - m[k] = val - return m +try: + FunctionResultEntry.model_rebuild() +except NameError: + pass diff --git a/src/mistralai/client/models/functiontool.py b/src/mistralai/client/models/functiontool.py index 13b04496..eae87264 100644 --- a/src/mistralai/client/models/functiontool.py +++ b/src/mistralai/client/models/functiontool.py @@ -19,7 +19,13 @@ class FunctionToolTypedDict(TypedDict): class FunctionTool(BaseModel): function: Function - TYPE: Annotated[ + type: Annotated[ Annotated[Literal["function"], AfterValidator(validate_const("function"))], pydantic.Field(alias="type"), ] = "function" + + +try: + FunctionTool.model_rebuild() +except NameError: + pass diff --git a/src/mistralai/client/models/getagentop.py b/src/mistralai/client/models/getagentop.py deleted file mode 100644 index 55d8fe68..00000000 --- a/src/mistralai/client/models/getagentop.py +++ 
/dev/null @@ -1,69 +0,0 @@ -"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" -# @generated-id: 5a28bb1e727e - -from __future__ import annotations -from mistralai.client.types import ( - BaseModel, - Nullable, - OptionalNullable, - UNSET, - UNSET_SENTINEL, -) -from mistralai.client.utils import FieldMetadata, PathParamMetadata, QueryParamMetadata -from pydantic import model_serializer -from typing import Union -from typing_extensions import Annotated, NotRequired, TypeAliasType, TypedDict - - -GetAgentAgentVersionTypedDict = TypeAliasType( - "GetAgentAgentVersionTypedDict", Union[int, str] -) - - -GetAgentAgentVersion = TypeAliasType("GetAgentAgentVersion", Union[int, str]) - - -class GetAgentRequestTypedDict(TypedDict): - agent_id: str - agent_version: NotRequired[Nullable[GetAgentAgentVersionTypedDict]] - - -class GetAgentRequest(BaseModel): - agent_id: Annotated[ - str, FieldMetadata(path=PathParamMetadata(style="simple", explode=False)) - ] - - agent_version: Annotated[ - OptionalNullable[GetAgentAgentVersion], - FieldMetadata(query=QueryParamMetadata(style="form", explode=True)), - ] = UNSET - - @model_serializer(mode="wrap") - def serialize_model(self, handler): - optional_fields = ["agent_version"] - nullable_fields = ["agent_version"] - null_default_fields = [] - - serialized = handler(self) - - m = {} - - for n, f in type(self).model_fields.items(): - k = f.alias or n - val = serialized.get(k) - serialized.pop(k, None) - - optional_nullable = k in optional_fields and k in nullable_fields - is_set = ( - self.__pydantic_fields_set__.intersection({n}) - or k in null_default_fields - ) # pylint: disable=no-member - - if val is not None and val != UNSET_SENTINEL: - m[k] = val - elif val != UNSET_SENTINEL and ( - not k in optional_fields or (optional_nullable and is_set) - ): - m[k] = val - - return m diff --git a/src/mistralai/client/models/getdocumenttextcontentop.py b/src/mistralai/client/models/getdocumenttextcontentop.py deleted file 
mode 100644 index 8a7b4aae..00000000 --- a/src/mistralai/client/models/getdocumenttextcontentop.py +++ /dev/null @@ -1,22 +0,0 @@ -"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" -# @generated-id: ba23717093ef - -from __future__ import annotations -from mistralai.client.types import BaseModel -from mistralai.client.utils import FieldMetadata, PathParamMetadata -from typing_extensions import Annotated, TypedDict - - -class GetDocumentTextContentRequestTypedDict(TypedDict): - library_id: str - document_id: str - - -class GetDocumentTextContentRequest(BaseModel): - library_id: Annotated[ - str, FieldMetadata(path=PathParamMetadata(style="simple", explode=False)) - ] - - document_id: Annotated[ - str, FieldMetadata(path=PathParamMetadata(style="simple", explode=False)) - ] diff --git a/src/mistralai/client/models/retrievefileout.py b/src/mistralai/client/models/getfileresponse.py similarity index 69% rename from src/mistralai/client/models/retrievefileout.py rename to src/mistralai/client/models/getfileresponse.py index 2abf2161..f625c153 100644 --- a/src/mistralai/client/models/retrievefileout.py +++ b/src/mistralai/client/models/getfileresponse.py @@ -1,5 +1,5 @@ """Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" -# @generated-id: 8bb5859aa0d0 +# @generated-id: 81919086e371 from __future__ import annotations from .filepurpose import FilePurpose @@ -17,7 +17,7 @@ from typing_extensions import Annotated, NotRequired, TypedDict -class RetrieveFileOutTypedDict(TypedDict): +class GetFileResponseTypedDict(TypedDict): id: str r"""The unique identifier of the file.""" object: str @@ -37,7 +37,7 @@ class RetrieveFileOutTypedDict(TypedDict): signature: NotRequired[Nullable[str]] -class RetrieveFileOut(BaseModel): +class GetFileResponse(BaseModel): id: str r"""The unique identifier of the file.""" @@ -69,30 +69,31 @@ class RetrieveFileOut(BaseModel): @model_serializer(mode="wrap") def serialize_model(self, handler): - optional_fields = ["num_lines", "mimetype", "signature"] - nullable_fields = ["num_lines", "mimetype", "signature"] - null_default_fields = [] - + optional_fields = set(["num_lines", "mimetype", "signature"]) + nullable_fields = set(["num_lines", "mimetype", "signature"]) serialized = handler(self) - m = {} for n, f in type(self).model_fields.items(): k = f.alias or n val = serialized.get(k) - serialized.pop(k, None) + is_nullable_and_explicitly_set = ( + k in nullable_fields + and (self.__pydantic_fields_set__.intersection({n})) # pylint: disable=no-member + ) + + if val != UNSET_SENTINEL: + if ( + val is not None + or k not in optional_fields + or is_nullable_and_explicitly_set + ): + m[k] = val - optional_nullable = k in optional_fields and k in nullable_fields - is_set = ( - self.__pydantic_fields_set__.intersection({n}) - or k in null_default_fields - ) # pylint: disable=no-member + return m - if val is not None and val != UNSET_SENTINEL: - m[k] = val - elif val != UNSET_SENTINEL and ( - not k in optional_fields or (optional_nullable and is_set) - ): - m[k] = val - return m +try: + GetFileResponse.model_rebuild() +except NameError: + pass diff --git a/src/mistralai/client/models/getfinetuningjobop.py 
b/src/mistralai/client/models/getfinetuningjobop.py deleted file mode 100644 index 1fb732f4..00000000 --- a/src/mistralai/client/models/getfinetuningjobop.py +++ /dev/null @@ -1,43 +0,0 @@ -"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" -# @generated-id: afe997f96d69 - -from __future__ import annotations -from .classifierdetailedjobout import ( - ClassifierDetailedJobOut, - ClassifierDetailedJobOutTypedDict, -) -from .completiondetailedjobout import ( - CompletionDetailedJobOut, - CompletionDetailedJobOutTypedDict, -) -from mistralai.client.types import BaseModel -from mistralai.client.utils import FieldMetadata, PathParamMetadata -from pydantic import Field -from typing import Union -from typing_extensions import Annotated, TypeAliasType, TypedDict - - -class GetFineTuningJobRequestTypedDict(TypedDict): - job_id: str - r"""The ID of the job to analyse.""" - - -class GetFineTuningJobRequest(BaseModel): - job_id: Annotated[ - str, FieldMetadata(path=PathParamMetadata(style="simple", explode=False)) - ] - r"""The ID of the job to analyse.""" - - -GetFineTuningJobResponseTypedDict = TypeAliasType( - "GetFineTuningJobResponseTypedDict", - Union[CompletionDetailedJobOutTypedDict, ClassifierDetailedJobOutTypedDict], -) -r"""OK""" - - -GetFineTuningJobResponse = Annotated[ - Union[ClassifierDetailedJobOut, CompletionDetailedJobOut], - Field(discriminator="JOB_TYPE"), -] -r"""OK""" diff --git a/src/mistralai/client/models/filesignedurl.py b/src/mistralai/client/models/getsignedurlresponse.py similarity index 65% rename from src/mistralai/client/models/filesignedurl.py rename to src/mistralai/client/models/getsignedurlresponse.py index 53dff812..4ba95894 100644 --- a/src/mistralai/client/models/filesignedurl.py +++ b/src/mistralai/client/models/getsignedurlresponse.py @@ -1,14 +1,14 @@ """Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" -# @generated-id: a1754c725163 +# @generated-id: cee4e4197372 from __future__ import annotations from mistralai.client.types import BaseModel from typing_extensions import TypedDict -class FileSignedURLTypedDict(TypedDict): +class GetSignedURLResponseTypedDict(TypedDict): url: str -class FileSignedURL(BaseModel): +class GetSignedURLResponse(BaseModel): url: str diff --git a/src/mistralai/client/models/githubrepositoryout.py b/src/mistralai/client/models/githubrepository.py similarity index 59% rename from src/mistralai/client/models/githubrepositoryout.py rename to src/mistralai/client/models/githubrepository.py index 514df01c..84b01078 100644 --- a/src/mistralai/client/models/githubrepositoryout.py +++ b/src/mistralai/client/models/githubrepository.py @@ -1,5 +1,5 @@ """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" -# @generated-id: d2434a167623 +# @generated-id: 4bc83ce18378 from __future__ import annotations from mistralai.client.types import ( @@ -17,7 +17,7 @@ from typing_extensions import Annotated, NotRequired, TypedDict -class GithubRepositoryOutTypedDict(TypedDict): +class GithubRepositoryTypedDict(TypedDict): name: str owner: str commit_id: str @@ -26,14 +26,14 @@ class GithubRepositoryOutTypedDict(TypedDict): weight: NotRequired[float] -class GithubRepositoryOut(BaseModel): +class GithubRepository(BaseModel): name: str owner: str commit_id: str - TYPE: Annotated[ + type: Annotated[ Annotated[Literal["github"], AfterValidator(validate_const("github"))], pydantic.Field(alias="type"), ] = "github" @@ -44,30 +44,31 @@ class GithubRepositoryOut(BaseModel): @model_serializer(mode="wrap") def serialize_model(self, handler): - optional_fields = ["ref", "weight"] - nullable_fields = ["ref"] - null_default_fields = [] - + optional_fields = set(["ref", "weight"]) + nullable_fields = set(["ref"]) serialized = handler(self) - m = {} for n, f in type(self).model_fields.items(): k = f.alias or n val = serialized.get(k) - 
serialized.pop(k, None) + is_nullable_and_explicitly_set = ( + k in nullable_fields + and (self.__pydantic_fields_set__.intersection({n})) # pylint: disable=no-member + ) + + if val != UNSET_SENTINEL: + if ( + val is not None + or k not in optional_fields + or is_nullable_and_explicitly_set + ): + m[k] = val - optional_nullable = k in optional_fields and k in nullable_fields - is_set = ( - self.__pydantic_fields_set__.intersection({n}) - or k in null_default_fields - ) # pylint: disable=no-member + return m - if val is not None and val != UNSET_SENTINEL: - m[k] = val - elif val != UNSET_SENTINEL and ( - not k in optional_fields or (optional_nullable and is_set) - ): - m[k] = val - return m +try: + GithubRepository.model_rebuild() +except NameError: + pass diff --git a/src/mistralai/client/models/githubrepositoryin.py b/src/mistralai/client/models/githubrepositoryin.py index e55389c3..38bcc208 100644 --- a/src/mistralai/client/models/githubrepositoryin.py +++ b/src/mistralai/client/models/githubrepositoryin.py @@ -33,7 +33,7 @@ class GithubRepositoryIn(BaseModel): token: str - TYPE: Annotated[ + type: Annotated[ Annotated[Literal["github"], AfterValidator(validate_const("github"))], pydantic.Field(alias="type"), ] = "github" @@ -44,30 +44,31 @@ class GithubRepositoryIn(BaseModel): @model_serializer(mode="wrap") def serialize_model(self, handler): - optional_fields = ["ref", "weight"] - nullable_fields = ["ref"] - null_default_fields = [] - + optional_fields = set(["ref", "weight"]) + nullable_fields = set(["ref"]) serialized = handler(self) - m = {} for n, f in type(self).model_fields.items(): k = f.alias or n val = serialized.get(k) - serialized.pop(k, None) + is_nullable_and_explicitly_set = ( + k in nullable_fields + and (self.__pydantic_fields_set__.intersection({n})) # pylint: disable=no-member + ) + + if val != UNSET_SENTINEL: + if ( + val is not None + or k not in optional_fields + or is_nullable_and_explicitly_set + ): + m[k] = val - optional_nullable = k in 
optional_fields and k in nullable_fields - is_set = ( - self.__pydantic_fields_set__.intersection({n}) - or k in null_default_fields - ) # pylint: disable=no-member + return m - if val is not None and val != UNSET_SENTINEL: - m[k] = val - elif val != UNSET_SENTINEL and ( - not k in optional_fields or (optional_nullable and is_set) - ): - m[k] = val - return m +try: + GithubRepositoryIn.model_rebuild() +except NameError: + pass diff --git a/src/mistralai/client/models/imagedetail.py b/src/mistralai/client/models/imagedetail.py new file mode 100644 index 00000000..1982d357 --- /dev/null +++ b/src/mistralai/client/models/imagedetail.py @@ -0,0 +1,16 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" +# @generated-id: c1084b549abb + +from __future__ import annotations +from mistralai.client.types import UnrecognizedStr +from typing import Literal, Union + + +ImageDetail = Union[ + Literal[ + "low", + "auto", + "high", + ], + UnrecognizedStr, +] diff --git a/src/mistralai/client/models/imagegenerationtool.py b/src/mistralai/client/models/imagegenerationtool.py index 680c6ce2..c1789b18 100644 --- a/src/mistralai/client/models/imagegenerationtool.py +++ b/src/mistralai/client/models/imagegenerationtool.py @@ -2,23 +2,65 @@ # @generated-id: e1532275faa0 from __future__ import annotations -from mistralai.client.types import BaseModel +from .toolconfiguration import ToolConfiguration, ToolConfigurationTypedDict +from mistralai.client.types import ( + BaseModel, + Nullable, + OptionalNullable, + UNSET, + UNSET_SENTINEL, +) from mistralai.client.utils import validate_const import pydantic +from pydantic import model_serializer from pydantic.functional_validators import AfterValidator from typing import Literal -from typing_extensions import Annotated, TypedDict +from typing_extensions import Annotated, NotRequired, TypedDict class ImageGenerationToolTypedDict(TypedDict): + tool_configuration: NotRequired[Nullable[ToolConfigurationTypedDict]] type: 
Literal["image_generation"] class ImageGenerationTool(BaseModel): - TYPE: Annotated[ + tool_configuration: OptionalNullable[ToolConfiguration] = UNSET + + type: Annotated[ Annotated[ Literal["image_generation"], AfterValidator(validate_const("image_generation")), ], pydantic.Field(alias="type"), ] = "image_generation" + + @model_serializer(mode="wrap") + def serialize_model(self, handler): + optional_fields = set(["tool_configuration"]) + nullable_fields = set(["tool_configuration"]) + serialized = handler(self) + m = {} + + for n, f in type(self).model_fields.items(): + k = f.alias or n + val = serialized.get(k) + is_nullable_and_explicitly_set = ( + k in nullable_fields + and (self.__pydantic_fields_set__.intersection({n})) # pylint: disable=no-member + ) + + if val != UNSET_SENTINEL: + if ( + val is not None + or k not in optional_fields + or is_nullable_and_explicitly_set + ): + m[k] = val + + return m + + +try: + ImageGenerationTool.model_rebuild() +except NameError: + pass diff --git a/src/mistralai/client/models/imageurl.py b/src/mistralai/client/models/imageurl.py index 4ff13b1c..ac1030f5 100644 --- a/src/mistralai/client/models/imageurl.py +++ b/src/mistralai/client/models/imageurl.py @@ -2,6 +2,7 @@ # @generated-id: e4bbf5881fbf from __future__ import annotations +from .imagedetail import ImageDetail from mistralai.client.types import ( BaseModel, Nullable, @@ -15,40 +16,35 @@ class ImageURLTypedDict(TypedDict): url: str - detail: NotRequired[Nullable[str]] + detail: NotRequired[Nullable[ImageDetail]] class ImageURL(BaseModel): url: str - detail: OptionalNullable[str] = UNSET + detail: OptionalNullable[ImageDetail] = UNSET @model_serializer(mode="wrap") def serialize_model(self, handler): - optional_fields = ["detail"] - nullable_fields = ["detail"] - null_default_fields = [] - + optional_fields = set(["detail"]) + nullable_fields = set(["detail"]) serialized = handler(self) - m = {} for n, f in type(self).model_fields.items(): k = f.alias or n val = 
serialized.get(k) - serialized.pop(k, None) - - optional_nullable = k in optional_fields and k in nullable_fields - is_set = ( - self.__pydantic_fields_set__.intersection({n}) - or k in null_default_fields - ) # pylint: disable=no-member - - if val is not None and val != UNSET_SENTINEL: - m[k] = val - elif val != UNSET_SENTINEL and ( - not k in optional_fields or (optional_nullable and is_set) - ): - m[k] = val + is_nullable_and_explicitly_set = ( + k in nullable_fields + and (self.__pydantic_fields_set__.intersection({n})) # pylint: disable=no-member + ) + + if val != UNSET_SENTINEL: + if ( + val is not None + or k not in optional_fields + or is_nullable_and_explicitly_set + ): + m[k] = val return m diff --git a/src/mistralai/client/models/imageurlchunk.py b/src/mistralai/client/models/imageurlchunk.py index 993185cc..7134b46e 100644 --- a/src/mistralai/client/models/imageurlchunk.py +++ b/src/mistralai/client/models/imageurlchunk.py @@ -3,9 +3,13 @@ from __future__ import annotations from .imageurl import ImageURL, ImageURLTypedDict -from mistralai.client.types import BaseModel +from mistralai.client.types import BaseModel, UNSET_SENTINEL +from mistralai.client.utils import validate_const +import pydantic +from pydantic import model_serializer +from pydantic.functional_validators import AfterValidator from typing import Literal, Optional, Union -from typing_extensions import NotRequired, TypeAliasType, TypedDict +from typing_extensions import Annotated, TypeAliasType, TypedDict ImageURLUnionTypedDict = TypeAliasType( @@ -16,14 +20,11 @@ ImageURLUnion = TypeAliasType("ImageURLUnion", Union[ImageURL, str]) -ImageURLChunkType = Literal["image_url",] - - class ImageURLChunkTypedDict(TypedDict): r"""{\"type\":\"image_url\",\"image_url\":{\"url\":\"data:image/png;base64,iVBORw0""" image_url: ImageURLUnionTypedDict - type: NotRequired[ImageURLChunkType] + type: Literal["image_url"] class ImageURLChunk(BaseModel): @@ -31,4 +32,31 @@ class ImageURLChunk(BaseModel): 
image_url: ImageURLUnion - type: Optional[ImageURLChunkType] = "image_url" + type: Annotated[ + Annotated[ + Optional[Literal["image_url"]], AfterValidator(validate_const("image_url")) + ], + pydantic.Field(alias="type"), + ] = "image_url" + + @model_serializer(mode="wrap") + def serialize_model(self, handler): + optional_fields = set(["type"]) + serialized = handler(self) + m = {} + + for n, f in type(self).model_fields.items(): + k = f.alias or n + val = serialized.get(k) + + if val != UNSET_SENTINEL: + if val is not None or k not in optional_fields: + m[k] = val + + return m + + +try: + ImageURLChunk.model_rebuild() +except NameError: + pass diff --git a/src/mistralai/client/models/inputentries.py b/src/mistralai/client/models/inputentries.py index dc989295..e2da5a80 100644 --- a/src/mistralai/client/models/inputentries.py +++ b/src/mistralai/client/models/inputentries.py @@ -17,10 +17,10 @@ Union[ FunctionResultEntryTypedDict, MessageInputEntryTypedDict, - FunctionCallEntryTypedDict, - ToolExecutionEntryTypedDict, MessageOutputEntryTypedDict, AgentHandoffEntryTypedDict, + ToolExecutionEntryTypedDict, + FunctionCallEntryTypedDict, ], ) @@ -30,9 +30,9 @@ Union[ FunctionResultEntry, MessageInputEntry, - FunctionCallEntry, - ToolExecutionEntry, MessageOutputEntry, AgentHandoffEntry, + ToolExecutionEntry, + FunctionCallEntry, ], ) diff --git a/src/mistralai/client/models/inputs.py b/src/mistralai/client/models/inputs.py index cfcdeb3d..9ecd7f48 100644 --- a/src/mistralai/client/models/inputs.py +++ b/src/mistralai/client/models/inputs.py @@ -2,54 +2,16 @@ # @generated-id: 84a8007518c7 from __future__ import annotations -from .assistantmessage import AssistantMessage, AssistantMessageTypedDict from .instructrequest import InstructRequest, InstructRequestTypedDict -from .systemmessage import SystemMessage, SystemMessageTypedDict -from .toolmessage import ToolMessage, ToolMessageTypedDict -from .usermessage import UserMessage, UserMessageTypedDict -from 
mistralai.client.types import BaseModel -from mistralai.client.utils import get_discriminator -from pydantic import Discriminator, Tag from typing import List, Union -from typing_extensions import Annotated, TypeAliasType, TypedDict - - -InputsMessageTypedDict = TypeAliasType( - "InputsMessageTypedDict", - Union[ - SystemMessageTypedDict, - UserMessageTypedDict, - AssistantMessageTypedDict, - ToolMessageTypedDict, - ], -) - - -InputsMessage = Annotated[ - Union[ - Annotated[AssistantMessage, Tag("assistant")], - Annotated[SystemMessage, Tag("system")], - Annotated[ToolMessage, Tag("tool")], - Annotated[UserMessage, Tag("user")], - ], - Discriminator(lambda m: get_discriminator(m, "role", "role")), -] - - -class InstructRequestInputsTypedDict(TypedDict): - messages: List[InputsMessageTypedDict] - - -class InstructRequestInputs(BaseModel): - messages: List[InputsMessage] +from typing_extensions import TypeAliasType InputsTypedDict = TypeAliasType( - "InputsTypedDict", - Union[InstructRequestInputsTypedDict, List[InstructRequestTypedDict]], + "InputsTypedDict", Union[InstructRequestTypedDict, List[InstructRequestTypedDict]] ) r"""Chat to classify""" -Inputs = TypeAliasType("Inputs", Union[InstructRequestInputs, List[InstructRequest]]) +Inputs = TypeAliasType("Inputs", Union[InstructRequest, List[InstructRequest]]) r"""Chat to classify""" diff --git a/src/mistralai/client/models/jobmetadataout.py b/src/mistralai/client/models/jobmetadata.py similarity index 52% rename from src/mistralai/client/models/jobmetadataout.py rename to src/mistralai/client/models/jobmetadata.py index 1d386539..f6e96fa1 100644 --- a/src/mistralai/client/models/jobmetadataout.py +++ b/src/mistralai/client/models/jobmetadata.py @@ -1,5 +1,5 @@ """Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" -# @generated-id: 805f41e3292a +# @generated-id: cfbdde7fc0a2 from __future__ import annotations from mistralai.client.types import ( @@ -13,7 +13,7 @@ from typing_extensions import NotRequired, TypedDict -class JobMetadataOutTypedDict(TypedDict): +class JobMetadataTypedDict(TypedDict): expected_duration_seconds: NotRequired[Nullable[int]] cost: NotRequired[Nullable[float]] cost_currency: NotRequired[Nullable[str]] @@ -23,7 +23,7 @@ class JobMetadataOutTypedDict(TypedDict): estimated_start_time: NotRequired[Nullable[int]] -class JobMetadataOut(BaseModel): +class JobMetadata(BaseModel): expected_duration_seconds: OptionalNullable[int] = UNSET cost: OptionalNullable[float] = UNSET @@ -40,46 +40,45 @@ class JobMetadataOut(BaseModel): @model_serializer(mode="wrap") def serialize_model(self, handler): - optional_fields = [ - "expected_duration_seconds", - "cost", - "cost_currency", - "train_tokens_per_step", - "train_tokens", - "data_tokens", - "estimated_start_time", - ] - nullable_fields = [ - "expected_duration_seconds", - "cost", - "cost_currency", - "train_tokens_per_step", - "train_tokens", - "data_tokens", - "estimated_start_time", - ] - null_default_fields = [] - + optional_fields = set( + [ + "expected_duration_seconds", + "cost", + "cost_currency", + "train_tokens_per_step", + "train_tokens", + "data_tokens", + "estimated_start_time", + ] + ) + nullable_fields = set( + [ + "expected_duration_seconds", + "cost", + "cost_currency", + "train_tokens_per_step", + "train_tokens", + "data_tokens", + "estimated_start_time", + ] + ) serialized = handler(self) - m = {} for n, f in type(self).model_fields.items(): k = f.alias or n val = serialized.get(k) - serialized.pop(k, None) - - optional_nullable = k in optional_fields and k in nullable_fields - is_set = ( - self.__pydantic_fields_set__.intersection({n}) - or k in null_default_fields - ) # pylint: disable=no-member - - if val is not None and val != UNSET_SENTINEL: - m[k] = val - elif val != 
UNSET_SENTINEL and ( - not k in optional_fields or (optional_nullable and is_set) - ): - m[k] = val + is_nullable_and_explicitly_set = ( + k in nullable_fields + and (self.__pydantic_fields_set__.intersection({n})) # pylint: disable=no-member + ) + + if val != UNSET_SENTINEL: + if ( + val is not None + or k not in optional_fields + or is_nullable_and_explicitly_set + ): + m[k] = val return m diff --git a/src/mistralai/client/models/cancelbatchjobop.py b/src/mistralai/client/models/jobs_api_routes_batch_cancel_batch_jobop.py similarity index 72% rename from src/mistralai/client/models/cancelbatchjobop.py rename to src/mistralai/client/models/jobs_api_routes_batch_cancel_batch_jobop.py index cd94ee86..de2e6347 100644 --- a/src/mistralai/client/models/cancelbatchjobop.py +++ b/src/mistralai/client/models/jobs_api_routes_batch_cancel_batch_jobop.py @@ -1,5 +1,5 @@ """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" -# @generated-id: cebac10b56a9 +# @generated-id: b56cb6c17c95 from __future__ import annotations from mistralai.client.types import BaseModel @@ -7,11 +7,11 @@ from typing_extensions import Annotated, TypedDict -class CancelBatchJobRequestTypedDict(TypedDict): +class JobsAPIRoutesBatchCancelBatchJobRequestTypedDict(TypedDict): job_id: str -class CancelBatchJobRequest(BaseModel): +class JobsAPIRoutesBatchCancelBatchJobRequest(BaseModel): job_id: Annotated[ str, FieldMetadata(path=PathParamMetadata(style="simple", explode=False)) ] diff --git a/src/mistralai/client/models/getbatchjobop.py b/src/mistralai/client/models/jobs_api_routes_batch_get_batch_jobop.py similarity index 56% rename from src/mistralai/client/models/getbatchjobop.py rename to src/mistralai/client/models/jobs_api_routes_batch_get_batch_jobop.py index 792c3e21..d779e1d9 100644 --- a/src/mistralai/client/models/getbatchjobop.py +++ b/src/mistralai/client/models/jobs_api_routes_batch_get_batch_jobop.py @@ -1,5 +1,5 @@ """Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" -# @generated-id: 443103fe3b88 +# @generated-id: 36b5a6b3ceee from __future__ import annotations from mistralai.client.types import ( @@ -14,12 +14,12 @@ from typing_extensions import Annotated, NotRequired, TypedDict -class GetBatchJobRequestTypedDict(TypedDict): +class JobsAPIRoutesBatchGetBatchJobRequestTypedDict(TypedDict): job_id: str inline: NotRequired[Nullable[bool]] -class GetBatchJobRequest(BaseModel): +class JobsAPIRoutesBatchGetBatchJobRequest(BaseModel): job_id: Annotated[ str, FieldMetadata(path=PathParamMetadata(style="simple", explode=False)) ] @@ -31,30 +31,25 @@ class GetBatchJobRequest(BaseModel): @model_serializer(mode="wrap") def serialize_model(self, handler): - optional_fields = ["inline"] - nullable_fields = ["inline"] - null_default_fields = [] - + optional_fields = set(["inline"]) + nullable_fields = set(["inline"]) serialized = handler(self) - m = {} for n, f in type(self).model_fields.items(): k = f.alias or n val = serialized.get(k) - serialized.pop(k, None) - - optional_nullable = k in optional_fields and k in nullable_fields - is_set = ( - self.__pydantic_fields_set__.intersection({n}) - or k in null_default_fields - ) # pylint: disable=no-member - - if val is not None and val != UNSET_SENTINEL: - m[k] = val - elif val != UNSET_SENTINEL and ( - not k in optional_fields or (optional_nullable and is_set) - ): - m[k] = val + is_nullable_and_explicitly_set = ( + k in nullable_fields + and (self.__pydantic_fields_set__.intersection({n})) # pylint: disable=no-member + ) + + if val != UNSET_SENTINEL: + if ( + val is not None + or k not in optional_fields + or is_nullable_and_explicitly_set + ): + m[k] = val return m diff --git a/src/mistralai/client/models/listbatchjobsop.py b/src/mistralai/client/models/jobs_api_routes_batch_get_batch_jobsop.py similarity index 71% rename from src/mistralai/client/models/listbatchjobsop.py rename to src/mistralai/client/models/jobs_api_routes_batch_get_batch_jobsop.py index 
5322df81..89ac3c93 100644 --- a/src/mistralai/client/models/listbatchjobsop.py +++ b/src/mistralai/client/models/jobs_api_routes_batch_get_batch_jobsop.py @@ -1,5 +1,5 @@ """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" -# @generated-id: f49af453f5e6 +# @generated-id: d8f0af99c94d from __future__ import annotations from .batchjobstatus import BatchJobStatus @@ -23,7 +23,7 @@ ] -class ListBatchJobsRequestTypedDict(TypedDict): +class JobsAPIRoutesBatchGetBatchJobsRequestTypedDict(TypedDict): page: NotRequired[int] page_size: NotRequired[int] model: NotRequired[Nullable[str]] @@ -35,7 +35,7 @@ class ListBatchJobsRequestTypedDict(TypedDict): order_by: NotRequired[OrderBy] -class ListBatchJobsRequest(BaseModel): +class JobsAPIRoutesBatchGetBatchJobsRequest(BaseModel): page: Annotated[ Optional[int], FieldMetadata(query=QueryParamMetadata(style="form", explode=True)), @@ -83,40 +83,39 @@ class ListBatchJobsRequest(BaseModel): @model_serializer(mode="wrap") def serialize_model(self, handler): - optional_fields = [ - "page", - "page_size", - "model", - "agent_id", - "metadata", - "created_after", - "created_by_me", - "status", - "order_by", - ] - nullable_fields = ["model", "agent_id", "metadata", "created_after", "status"] - null_default_fields = [] - + optional_fields = set( + [ + "page", + "page_size", + "model", + "agent_id", + "metadata", + "created_after", + "created_by_me", + "status", + "order_by", + ] + ) + nullable_fields = set( + ["model", "agent_id", "metadata", "created_after", "status"] + ) serialized = handler(self) - m = {} for n, f in type(self).model_fields.items(): k = f.alias or n val = serialized.get(k) - serialized.pop(k, None) - - optional_nullable = k in optional_fields and k in nullable_fields - is_set = ( - self.__pydantic_fields_set__.intersection({n}) - or k in null_default_fields - ) # pylint: disable=no-member - - if val is not None and val != UNSET_SENTINEL: - m[k] = val - elif val != UNSET_SENTINEL and ( - not k in 
optional_fields or (optional_nullable and is_set) - ): - m[k] = val + is_nullable_and_explicitly_set = ( + k in nullable_fields + and (self.__pydantic_fields_set__.intersection({n})) # pylint: disable=no-member + ) + + if val != UNSET_SENTINEL: + if ( + val is not None + or k not in optional_fields + or is_nullable_and_explicitly_set + ): + m[k] = val return m diff --git a/src/mistralai/client/models/archivemodelop.py b/src/mistralai/client/models/jobs_api_routes_fine_tuning_archive_fine_tuned_modelop.py similarity index 73% rename from src/mistralai/client/models/archivemodelop.py rename to src/mistralai/client/models/jobs_api_routes_fine_tuning_archive_fine_tuned_modelop.py index 30b4a9bd..9fa99837 100644 --- a/src/mistralai/client/models/archivemodelop.py +++ b/src/mistralai/client/models/jobs_api_routes_fine_tuning_archive_fine_tuned_modelop.py @@ -1,5 +1,5 @@ """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" -# @generated-id: beefa1df3b7c +# @generated-id: 34f89d2af0ec from __future__ import annotations from mistralai.client.types import BaseModel @@ -7,12 +7,12 @@ from typing_extensions import Annotated, TypedDict -class ArchiveModelRequestTypedDict(TypedDict): +class JobsAPIRoutesFineTuningArchiveFineTunedModelRequestTypedDict(TypedDict): model_id: str r"""The ID of the model to archive.""" -class ArchiveModelRequest(BaseModel): +class JobsAPIRoutesFineTuningArchiveFineTunedModelRequest(BaseModel): model_id: Annotated[ str, FieldMetadata(path=PathParamMetadata(style="simple", explode=False)) ] diff --git a/src/mistralai/client/models/jobs_api_routes_fine_tuning_cancel_fine_tuning_jobop.py b/src/mistralai/client/models/jobs_api_routes_fine_tuning_cancel_fine_tuning_jobop.py new file mode 100644 index 00000000..56fa5340 --- /dev/null +++ b/src/mistralai/client/models/jobs_api_routes_fine_tuning_cancel_fine_tuning_jobop.py @@ -0,0 +1,78 @@ +"""Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" +# @generated-id: d175c6e32ecb + +from __future__ import annotations +from .classifierfinetuningjobdetails import ( + ClassifierFineTuningJobDetails, + ClassifierFineTuningJobDetailsTypedDict, +) +from .completionfinetuningjobdetails import ( + CompletionFineTuningJobDetails, + CompletionFineTuningJobDetailsTypedDict, +) +from functools import partial +from mistralai.client.types import BaseModel +from mistralai.client.utils import FieldMetadata, PathParamMetadata +from mistralai.client.utils.unions import parse_open_union +from pydantic import ConfigDict +from pydantic.functional_validators import BeforeValidator +from typing import Any, Literal, Union +from typing_extensions import Annotated, TypeAliasType, TypedDict + + +class JobsAPIRoutesFineTuningCancelFineTuningJobRequestTypedDict(TypedDict): + job_id: str + r"""The ID of the job to cancel.""" + + +class JobsAPIRoutesFineTuningCancelFineTuningJobRequest(BaseModel): + job_id: Annotated[ + str, FieldMetadata(path=PathParamMetadata(style="simple", explode=False)) + ] + r"""The ID of the job to cancel.""" + + +JobsAPIRoutesFineTuningCancelFineTuningJobResponseTypedDict = TypeAliasType( + "JobsAPIRoutesFineTuningCancelFineTuningJobResponseTypedDict", + Union[ + CompletionFineTuningJobDetailsTypedDict, ClassifierFineTuningJobDetailsTypedDict + ], +) +r"""OK""" + + +class UnknownJobsAPIRoutesFineTuningCancelFineTuningJobResponse(BaseModel): + r"""A JobsAPIRoutesFineTuningCancelFineTuningJobResponse variant the SDK doesn't recognize. 
Preserves the raw payload.""" + + job_type: Literal["UNKNOWN"] = "UNKNOWN" + raw: Any + is_unknown: Literal[True] = True + + model_config = ConfigDict(frozen=True) + + +_JOBS_API_ROUTES_FINE_TUNING_CANCEL_FINE_TUNING_JOB_RESPONSE_VARIANTS: dict[ + str, Any +] = { + "classifier": ClassifierFineTuningJobDetails, + "completion": CompletionFineTuningJobDetails, +} + + +JobsAPIRoutesFineTuningCancelFineTuningJobResponse = Annotated[ + Union[ + ClassifierFineTuningJobDetails, + CompletionFineTuningJobDetails, + UnknownJobsAPIRoutesFineTuningCancelFineTuningJobResponse, + ], + BeforeValidator( + partial( + parse_open_union, + disc_key="job_type", + variants=_JOBS_API_ROUTES_FINE_TUNING_CANCEL_FINE_TUNING_JOB_RESPONSE_VARIANTS, + unknown_cls=UnknownJobsAPIRoutesFineTuningCancelFineTuningJobResponse, + union_name="JobsAPIRoutesFineTuningCancelFineTuningJobResponse", + ) + ), +] +r"""OK""" diff --git a/src/mistralai/client/models/jobs_api_routes_fine_tuning_create_fine_tuning_jobop.py b/src/mistralai/client/models/jobs_api_routes_fine_tuning_create_fine_tuning_jobop.py new file mode 100644 index 00000000..db857f7d --- /dev/null +++ b/src/mistralai/client/models/jobs_api_routes_fine_tuning_create_fine_tuning_jobop.py @@ -0,0 +1,70 @@ +"""Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" +# @generated-id: 81651291187a + +from __future__ import annotations +from .classifierfinetuningjob import ( + ClassifierFineTuningJob, + ClassifierFineTuningJobTypedDict, +) +from .completionfinetuningjob import ( + CompletionFineTuningJob, + CompletionFineTuningJobTypedDict, +) +from .legacyjobmetadata import LegacyJobMetadata, LegacyJobMetadataTypedDict +from functools import partial +from mistralai.client.types import BaseModel +from mistralai.client.utils.unions import parse_open_union +from pydantic import ConfigDict +from pydantic.functional_validators import BeforeValidator +from typing import Any, Literal, Union +from typing_extensions import Annotated, TypeAliasType + + +ResponseTypedDict = TypeAliasType( + "ResponseTypedDict", + Union[ClassifierFineTuningJobTypedDict, CompletionFineTuningJobTypedDict], +) + + +class UnknownResponse(BaseModel): + r"""A Response variant the SDK doesn't recognize. Preserves the raw payload.""" + + job_type: Literal["UNKNOWN"] = "UNKNOWN" + raw: Any + is_unknown: Literal[True] = True + + model_config = ConfigDict(frozen=True) + + +_RESPONSE_VARIANTS: dict[str, Any] = { + "classifier": ClassifierFineTuningJob, + "completion": CompletionFineTuningJob, +} + + +Response = Annotated[ + Union[ClassifierFineTuningJob, CompletionFineTuningJob, UnknownResponse], + BeforeValidator( + partial( + parse_open_union, + disc_key="job_type", + variants=_RESPONSE_VARIANTS, + unknown_cls=UnknownResponse, + union_name="Response", + ) + ), +] + + +JobsAPIRoutesFineTuningCreateFineTuningJobResponseTypedDict = TypeAliasType( + "JobsAPIRoutesFineTuningCreateFineTuningJobResponseTypedDict", + Union[LegacyJobMetadataTypedDict, ResponseTypedDict], +) +r"""OK""" + + +JobsAPIRoutesFineTuningCreateFineTuningJobResponse = TypeAliasType( + "JobsAPIRoutesFineTuningCreateFineTuningJobResponse", + Union[LegacyJobMetadata, Response], +) +r"""OK""" diff --git a/src/mistralai/client/models/jobs_api_routes_fine_tuning_get_fine_tuning_jobop.py 
b/src/mistralai/client/models/jobs_api_routes_fine_tuning_get_fine_tuning_jobop.py new file mode 100644 index 00000000..ddd9c189 --- /dev/null +++ b/src/mistralai/client/models/jobs_api_routes_fine_tuning_get_fine_tuning_jobop.py @@ -0,0 +1,76 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" +# @generated-id: d910fd8fe2d6 + +from __future__ import annotations +from .classifierfinetuningjobdetails import ( + ClassifierFineTuningJobDetails, + ClassifierFineTuningJobDetailsTypedDict, +) +from .completionfinetuningjobdetails import ( + CompletionFineTuningJobDetails, + CompletionFineTuningJobDetailsTypedDict, +) +from functools import partial +from mistralai.client.types import BaseModel +from mistralai.client.utils import FieldMetadata, PathParamMetadata +from mistralai.client.utils.unions import parse_open_union +from pydantic import ConfigDict +from pydantic.functional_validators import BeforeValidator +from typing import Any, Literal, Union +from typing_extensions import Annotated, TypeAliasType, TypedDict + + +class JobsAPIRoutesFineTuningGetFineTuningJobRequestTypedDict(TypedDict): + job_id: str + r"""The ID of the job to analyse.""" + + +class JobsAPIRoutesFineTuningGetFineTuningJobRequest(BaseModel): + job_id: Annotated[ + str, FieldMetadata(path=PathParamMetadata(style="simple", explode=False)) + ] + r"""The ID of the job to analyse.""" + + +JobsAPIRoutesFineTuningGetFineTuningJobResponseTypedDict = TypeAliasType( + "JobsAPIRoutesFineTuningGetFineTuningJobResponseTypedDict", + Union[ + CompletionFineTuningJobDetailsTypedDict, ClassifierFineTuningJobDetailsTypedDict + ], +) +r"""OK""" + + +class UnknownJobsAPIRoutesFineTuningGetFineTuningJobResponse(BaseModel): + r"""A JobsAPIRoutesFineTuningGetFineTuningJobResponse variant the SDK doesn't recognize. 
Preserves the raw payload.""" + + job_type: Literal["UNKNOWN"] = "UNKNOWN" + raw: Any + is_unknown: Literal[True] = True + + model_config = ConfigDict(frozen=True) + + +_JOBS_API_ROUTES_FINE_TUNING_GET_FINE_TUNING_JOB_RESPONSE_VARIANTS: dict[str, Any] = { + "classifier": ClassifierFineTuningJobDetails, + "completion": CompletionFineTuningJobDetails, +} + + +JobsAPIRoutesFineTuningGetFineTuningJobResponse = Annotated[ + Union[ + ClassifierFineTuningJobDetails, + CompletionFineTuningJobDetails, + UnknownJobsAPIRoutesFineTuningGetFineTuningJobResponse, + ], + BeforeValidator( + partial( + parse_open_union, + disc_key="job_type", + variants=_JOBS_API_ROUTES_FINE_TUNING_GET_FINE_TUNING_JOB_RESPONSE_VARIANTS, + unknown_cls=UnknownJobsAPIRoutesFineTuningGetFineTuningJobResponse, + union_name="JobsAPIRoutesFineTuningGetFineTuningJobResponse", + ) + ), +] +r"""OK""" diff --git a/src/mistralai/client/models/listfinetuningjobsop.py b/src/mistralai/client/models/jobs_api_routes_fine_tuning_get_fine_tuning_jobsop.py similarity index 75% rename from src/mistralai/client/models/listfinetuningjobsop.py rename to src/mistralai/client/models/jobs_api_routes_fine_tuning_get_fine_tuning_jobsop.py index 8712c3fa..ec80a158 100644 --- a/src/mistralai/client/models/listfinetuningjobsop.py +++ b/src/mistralai/client/models/jobs_api_routes_fine_tuning_get_fine_tuning_jobsop.py @@ -1,5 +1,5 @@ """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" -# @generated-id: b77fe203b929 +# @generated-id: cf43028824bf from __future__ import annotations from datetime import datetime @@ -16,7 +16,7 @@ from typing_extensions import Annotated, NotRequired, TypedDict -ListFineTuningJobsStatus = Literal[ +JobsAPIRoutesFineTuningGetFineTuningJobsStatus = Literal[ "QUEUED", "STARTED", "VALIDATING", @@ -31,7 +31,7 @@ r"""The current job state to filter on. 
When set, the other results are not displayed.""" -class ListFineTuningJobsRequestTypedDict(TypedDict): +class JobsAPIRoutesFineTuningGetFineTuningJobsRequestTypedDict(TypedDict): page: NotRequired[int] r"""The page number of the results to be returned.""" page_size: NotRequired[int] @@ -43,7 +43,7 @@ class ListFineTuningJobsRequestTypedDict(TypedDict): created_before: NotRequired[Nullable[datetime]] created_by_me: NotRequired[bool] r"""When set, only return results for jobs created by the API caller. Other results are not displayed.""" - status: NotRequired[Nullable[ListFineTuningJobsStatus]] + status: NotRequired[Nullable[JobsAPIRoutesFineTuningGetFineTuningJobsStatus]] r"""The current job state to filter on. When set, the other results are not displayed.""" wandb_project: NotRequired[Nullable[str]] r"""The Weights and Biases project to filter on. When set, the other results are not displayed.""" @@ -53,7 +53,7 @@ class ListFineTuningJobsRequestTypedDict(TypedDict): r"""The model suffix to filter on. When set, the other results are not displayed.""" -class ListFineTuningJobsRequest(BaseModel): +class JobsAPIRoutesFineTuningGetFineTuningJobsRequest(BaseModel): page: Annotated[ Optional[int], FieldMetadata(query=QueryParamMetadata(style="form", explode=True)), @@ -90,7 +90,7 @@ class ListFineTuningJobsRequest(BaseModel): r"""When set, only return results for jobs created by the API caller. Other results are not displayed.""" status: Annotated[ - OptionalNullable[ListFineTuningJobsStatus], + OptionalNullable[JobsAPIRoutesFineTuningGetFineTuningJobsStatus], FieldMetadata(query=QueryParamMetadata(style="form", explode=True)), ] = UNSET r"""The current job state to filter on. 
When set, the other results are not displayed.""" @@ -115,49 +115,48 @@ class ListFineTuningJobsRequest(BaseModel): @model_serializer(mode="wrap") def serialize_model(self, handler): - optional_fields = [ - "page", - "page_size", - "model", - "created_after", - "created_before", - "created_by_me", - "status", - "wandb_project", - "wandb_name", - "suffix", - ] - nullable_fields = [ - "model", - "created_after", - "created_before", - "status", - "wandb_project", - "wandb_name", - "suffix", - ] - null_default_fields = [] - + optional_fields = set( + [ + "page", + "page_size", + "model", + "created_after", + "created_before", + "created_by_me", + "status", + "wandb_project", + "wandb_name", + "suffix", + ] + ) + nullable_fields = set( + [ + "model", + "created_after", + "created_before", + "status", + "wandb_project", + "wandb_name", + "suffix", + ] + ) serialized = handler(self) - m = {} for n, f in type(self).model_fields.items(): k = f.alias or n val = serialized.get(k) - serialized.pop(k, None) - - optional_nullable = k in optional_fields and k in nullable_fields - is_set = ( - self.__pydantic_fields_set__.intersection({n}) - or k in null_default_fields - ) # pylint: disable=no-member - - if val is not None and val != UNSET_SENTINEL: - m[k] = val - elif val != UNSET_SENTINEL and ( - not k in optional_fields or (optional_nullable and is_set) - ): - m[k] = val + is_nullable_and_explicitly_set = ( + k in nullable_fields + and (self.__pydantic_fields_set__.intersection({n})) # pylint: disable=no-member + ) + + if val != UNSET_SENTINEL: + if ( + val is not None + or k not in optional_fields + or is_nullable_and_explicitly_set + ): + m[k] = val return m diff --git a/src/mistralai/client/models/jobs_api_routes_fine_tuning_start_fine_tuning_jobop.py b/src/mistralai/client/models/jobs_api_routes_fine_tuning_start_fine_tuning_jobop.py new file mode 100644 index 00000000..cd25fa04 --- /dev/null +++ 
b/src/mistralai/client/models/jobs_api_routes_fine_tuning_start_fine_tuning_jobop.py @@ -0,0 +1,74 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" +# @generated-id: e7ff4a4a4edb + +from __future__ import annotations +from .classifierfinetuningjobdetails import ( + ClassifierFineTuningJobDetails, + ClassifierFineTuningJobDetailsTypedDict, +) +from .completionfinetuningjobdetails import ( + CompletionFineTuningJobDetails, + CompletionFineTuningJobDetailsTypedDict, +) +from functools import partial +from mistralai.client.types import BaseModel +from mistralai.client.utils import FieldMetadata, PathParamMetadata +from mistralai.client.utils.unions import parse_open_union +from pydantic import ConfigDict +from pydantic.functional_validators import BeforeValidator +from typing import Any, Literal, Union +from typing_extensions import Annotated, TypeAliasType, TypedDict + + +class JobsAPIRoutesFineTuningStartFineTuningJobRequestTypedDict(TypedDict): + job_id: str + + +class JobsAPIRoutesFineTuningStartFineTuningJobRequest(BaseModel): + job_id: Annotated[ + str, FieldMetadata(path=PathParamMetadata(style="simple", explode=False)) + ] + + +JobsAPIRoutesFineTuningStartFineTuningJobResponseTypedDict = TypeAliasType( + "JobsAPIRoutesFineTuningStartFineTuningJobResponseTypedDict", + Union[ + CompletionFineTuningJobDetailsTypedDict, ClassifierFineTuningJobDetailsTypedDict + ], +) +r"""OK""" + + +class UnknownJobsAPIRoutesFineTuningStartFineTuningJobResponse(BaseModel): + r"""A JobsAPIRoutesFineTuningStartFineTuningJobResponse variant the SDK doesn't recognize. 
Preserves the raw payload.""" + + job_type: Literal["UNKNOWN"] = "UNKNOWN" + raw: Any + is_unknown: Literal[True] = True + + model_config = ConfigDict(frozen=True) + + +_JOBS_API_ROUTES_FINE_TUNING_START_FINE_TUNING_JOB_RESPONSE_VARIANTS: dict[str, Any] = { + "classifier": ClassifierFineTuningJobDetails, + "completion": CompletionFineTuningJobDetails, +} + + +JobsAPIRoutesFineTuningStartFineTuningJobResponse = Annotated[ + Union[ + ClassifierFineTuningJobDetails, + CompletionFineTuningJobDetails, + UnknownJobsAPIRoutesFineTuningStartFineTuningJobResponse, + ], + BeforeValidator( + partial( + parse_open_union, + disc_key="job_type", + variants=_JOBS_API_ROUTES_FINE_TUNING_START_FINE_TUNING_JOB_RESPONSE_VARIANTS, + unknown_cls=UnknownJobsAPIRoutesFineTuningStartFineTuningJobResponse, + union_name="JobsAPIRoutesFineTuningStartFineTuningJobResponse", + ) + ), +] +r"""OK""" diff --git a/src/mistralai/client/models/unarchivemodelop.py b/src/mistralai/client/models/jobs_api_routes_fine_tuning_unarchive_fine_tuned_modelop.py similarity index 73% rename from src/mistralai/client/models/unarchivemodelop.py rename to src/mistralai/client/models/jobs_api_routes_fine_tuning_unarchive_fine_tuned_modelop.py index 1d68a06a..fd01fe69 100644 --- a/src/mistralai/client/models/unarchivemodelop.py +++ b/src/mistralai/client/models/jobs_api_routes_fine_tuning_unarchive_fine_tuned_modelop.py @@ -1,5 +1,5 @@ """Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" -# @generated-id: eb18584fd78c +# @generated-id: 7cc1c80335a9 from __future__ import annotations from mistralai.client.types import BaseModel @@ -7,12 +7,12 @@ from typing_extensions import Annotated, TypedDict -class UnarchiveModelRequestTypedDict(TypedDict): +class JobsAPIRoutesFineTuningUnarchiveFineTunedModelRequestTypedDict(TypedDict): model_id: str r"""The ID of the model to unarchive.""" -class UnarchiveModelRequest(BaseModel): +class JobsAPIRoutesFineTuningUnarchiveFineTunedModelRequest(BaseModel): model_id: Annotated[ str, FieldMetadata(path=PathParamMetadata(style="simple", explode=False)) ] diff --git a/src/mistralai/client/models/jobs_api_routes_fine_tuning_update_fine_tuned_modelop.py b/src/mistralai/client/models/jobs_api_routes_fine_tuning_update_fine_tuned_modelop.py new file mode 100644 index 00000000..296070b4 --- /dev/null +++ b/src/mistralai/client/models/jobs_api_routes_fine_tuning_update_fine_tuned_modelop.py @@ -0,0 +1,83 @@ +"""Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" +# @generated-id: 6d9dc624aafd + +from __future__ import annotations +from .classifierfinetunedmodel import ( + ClassifierFineTunedModel, + ClassifierFineTunedModelTypedDict, +) +from .completionfinetunedmodel import ( + CompletionFineTunedModel, + CompletionFineTunedModelTypedDict, +) +from .updatemodelrequest import UpdateModelRequest, UpdateModelRequestTypedDict +from functools import partial +from mistralai.client.types import BaseModel +from mistralai.client.utils import FieldMetadata, PathParamMetadata, RequestMetadata +from mistralai.client.utils.unions import parse_open_union +from pydantic import ConfigDict +from pydantic.functional_validators import BeforeValidator +from typing import Any, Literal, Union +from typing_extensions import Annotated, TypeAliasType, TypedDict + + +class JobsAPIRoutesFineTuningUpdateFineTunedModelRequestTypedDict(TypedDict): + model_id: str + r"""The ID of the model to update.""" + update_model_request: UpdateModelRequestTypedDict + + +class JobsAPIRoutesFineTuningUpdateFineTunedModelRequest(BaseModel): + model_id: Annotated[ + str, FieldMetadata(path=PathParamMetadata(style="simple", explode=False)) + ] + r"""The ID of the model to update.""" + + update_model_request: Annotated[ + UpdateModelRequest, + FieldMetadata(request=RequestMetadata(media_type="application/json")), + ] + + +JobsAPIRoutesFineTuningUpdateFineTunedModelResponseTypedDict = TypeAliasType( + "JobsAPIRoutesFineTuningUpdateFineTunedModelResponseTypedDict", + Union[CompletionFineTunedModelTypedDict, ClassifierFineTunedModelTypedDict], +) +r"""OK""" + + +class UnknownJobsAPIRoutesFineTuningUpdateFineTunedModelResponse(BaseModel): + r"""A JobsAPIRoutesFineTuningUpdateFineTunedModelResponse variant the SDK doesn't recognize. 
Preserves the raw payload.""" + + model_type: Literal["UNKNOWN"] = "UNKNOWN" + raw: Any + is_unknown: Literal[True] = True + + model_config = ConfigDict(frozen=True) + + +_JOBS_API_ROUTES_FINE_TUNING_UPDATE_FINE_TUNED_MODEL_RESPONSE_VARIANTS: dict[ + str, Any +] = { + "classifier": ClassifierFineTunedModel, + "completion": CompletionFineTunedModel, +} + + +JobsAPIRoutesFineTuningUpdateFineTunedModelResponse = Annotated[ + Union[ + ClassifierFineTunedModel, + CompletionFineTunedModel, + UnknownJobsAPIRoutesFineTuningUpdateFineTunedModelResponse, + ], + BeforeValidator( + partial( + parse_open_union, + disc_key="model_type", + variants=_JOBS_API_ROUTES_FINE_TUNING_UPDATE_FINE_TUNED_MODEL_RESPONSE_VARIANTS, + unknown_cls=UnknownJobsAPIRoutesFineTuningUpdateFineTunedModelResponse, + union_name="JobsAPIRoutesFineTuningUpdateFineTunedModelResponse", + ) + ), +] +r"""OK""" diff --git a/src/mistralai/client/models/jobsout.py b/src/mistralai/client/models/jobsout.py deleted file mode 100644 index a4127a5d..00000000 --- a/src/mistralai/client/models/jobsout.py +++ /dev/null @@ -1,40 +0,0 @@ -"""Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" -# @generated-id: 22e91e9631a9 - -from __future__ import annotations -from .classifierjobout import ClassifierJobOut, ClassifierJobOutTypedDict -from .completionjobout import CompletionJobOut, CompletionJobOutTypedDict -from mistralai.client.types import BaseModel -from mistralai.client.utils import validate_const -import pydantic -from pydantic import Field -from pydantic.functional_validators import AfterValidator -from typing import List, Literal, Optional, Union -from typing_extensions import Annotated, NotRequired, TypeAliasType, TypedDict - - -JobsOutDataTypedDict = TypeAliasType( - "JobsOutDataTypedDict", Union[ClassifierJobOutTypedDict, CompletionJobOutTypedDict] -) - - -JobsOutData = Annotated[ - Union[ClassifierJobOut, CompletionJobOut], Field(discriminator="JOB_TYPE") -] - - -class JobsOutTypedDict(TypedDict): - total: int - data: NotRequired[List[JobsOutDataTypedDict]] - object: Literal["list"] - - -class JobsOut(BaseModel): - total: int - - data: Optional[List[JobsOutData]] = None - - OBJECT: Annotated[ - Annotated[Optional[Literal["list"]], AfterValidator(validate_const("list"))], - pydantic.Field(alias="object"), - ] = "list" diff --git a/src/mistralai/client/models/jsonschema.py b/src/mistralai/client/models/jsonschema.py index 948c94ed..dfababa6 100644 --- a/src/mistralai/client/models/jsonschema.py +++ b/src/mistralai/client/models/jsonschema.py @@ -33,30 +33,31 @@ class JSONSchema(BaseModel): @model_serializer(mode="wrap") def serialize_model(self, handler): - optional_fields = ["description", "strict"] - nullable_fields = ["description"] - null_default_fields = [] - + optional_fields = set(["description", "strict"]) + nullable_fields = set(["description"]) serialized = handler(self) - m = {} for n, f in type(self).model_fields.items(): k = f.alias or n val = serialized.get(k) - serialized.pop(k, None) + is_nullable_and_explicitly_set = ( + k in nullable_fields + and (self.__pydantic_fields_set__.intersection({n})) # pylint: 
disable=no-member + ) + + if val != UNSET_SENTINEL: + if ( + val is not None + or k not in optional_fields + or is_nullable_and_explicitly_set + ): + m[k] = val - optional_nullable = k in optional_fields and k in nullable_fields - is_set = ( - self.__pydantic_fields_set__.intersection({n}) - or k in null_default_fields - ) # pylint: disable=no-member + return m - if val is not None and val != UNSET_SENTINEL: - m[k] = val - elif val != UNSET_SENTINEL and ( - not k in optional_fields or (optional_nullable and is_set) - ): - m[k] = val - return m +try: + JSONSchema.model_rebuild() +except NameError: + pass diff --git a/src/mistralai/client/models/legacyjobmetadataout.py b/src/mistralai/client/models/legacyjobmetadata.py similarity index 70% rename from src/mistralai/client/models/legacyjobmetadataout.py rename to src/mistralai/client/models/legacyjobmetadata.py index 4453c157..57576758 100644 --- a/src/mistralai/client/models/legacyjobmetadataout.py +++ b/src/mistralai/client/models/legacyjobmetadata.py @@ -1,5 +1,5 @@ """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" -# @generated-id: 4f44aa38c864 +# @generated-id: 0330b8930f65 from __future__ import annotations from mistralai.client.types import ( @@ -17,7 +17,7 @@ from typing_extensions import Annotated, NotRequired, TypedDict -class LegacyJobMetadataOutTypedDict(TypedDict): +class LegacyJobMetadataTypedDict(TypedDict): details: str expected_duration_seconds: NotRequired[Nullable[int]] r"""The approximated time (in seconds) for the fine-tuning process to complete.""" @@ -40,7 +40,7 @@ class LegacyJobMetadataOutTypedDict(TypedDict): object: Literal["job.metadata"] -class LegacyJobMetadataOut(BaseModel): +class LegacyJobMetadata(BaseModel): details: str expected_duration_seconds: OptionalNullable[int] = UNSET @@ -71,7 +71,7 @@ class LegacyJobMetadataOut(BaseModel): training_steps: OptionalNullable[int] = UNSET r"""The number of training steps to perform. 
A training step refers to a single update of the model weights during the fine-tuning process. This update is typically calculated using a batch of samples from the training dataset.""" - OBJECT: Annotated[ + object: Annotated[ Annotated[ Optional[Literal["job.metadata"]], AfterValidator(validate_const("job.metadata")), @@ -81,52 +81,57 @@ class LegacyJobMetadataOut(BaseModel): @model_serializer(mode="wrap") def serialize_model(self, handler): - optional_fields = [ - "expected_duration_seconds", - "cost", - "cost_currency", - "train_tokens_per_step", - "train_tokens", - "data_tokens", - "estimated_start_time", - "deprecated", - "epochs", - "training_steps", - "object", - ] - nullable_fields = [ - "expected_duration_seconds", - "cost", - "cost_currency", - "train_tokens_per_step", - "train_tokens", - "data_tokens", - "estimated_start_time", - "epochs", - "training_steps", - ] - null_default_fields = [] - + optional_fields = set( + [ + "expected_duration_seconds", + "cost", + "cost_currency", + "train_tokens_per_step", + "train_tokens", + "data_tokens", + "estimated_start_time", + "deprecated", + "epochs", + "training_steps", + "object", + ] + ) + nullable_fields = set( + [ + "expected_duration_seconds", + "cost", + "cost_currency", + "train_tokens_per_step", + "train_tokens", + "data_tokens", + "estimated_start_time", + "epochs", + "training_steps", + ] + ) serialized = handler(self) - m = {} for n, f in type(self).model_fields.items(): k = f.alias or n val = serialized.get(k) - serialized.pop(k, None) + is_nullable_and_explicitly_set = ( + k in nullable_fields + and (self.__pydantic_fields_set__.intersection({n})) # pylint: disable=no-member + ) + + if val != UNSET_SENTINEL: + if ( + val is not None + or k not in optional_fields + or is_nullable_and_explicitly_set + ): + m[k] = val - optional_nullable = k in optional_fields and k in nullable_fields - is_set = ( - self.__pydantic_fields_set__.intersection({n}) - or k in null_default_fields - ) # pylint: 
disable=no-member + return m - if val is not None and val != UNSET_SENTINEL: - m[k] = val - elif val != UNSET_SENTINEL and ( - not k in optional_fields or (optional_nullable and is_set) - ): - m[k] = val - return m +try: + LegacyJobMetadata.model_rebuild() +except NameError: + pass diff --git a/src/mistralai/client/models/deletelibraryop.py b/src/mistralai/client/models/libraries_delete_v1op.py similarity index 76% rename from src/mistralai/client/models/deletelibraryop.py rename to src/mistralai/client/models/libraries_delete_v1op.py index 5eb6fc31..893ab53b 100644 --- a/src/mistralai/client/models/deletelibraryop.py +++ b/src/mistralai/client/models/libraries_delete_v1op.py @@ -1,5 +1,5 @@ """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" -# @generated-id: cd0ce9bf8d51 +# @generated-id: b2e8bbd19baa from __future__ import annotations from mistralai.client.types import BaseModel @@ -7,11 +7,11 @@ from typing_extensions import Annotated, TypedDict -class DeleteLibraryRequestTypedDict(TypedDict): +class LibrariesDeleteV1RequestTypedDict(TypedDict): library_id: str -class DeleteLibraryRequest(BaseModel): +class LibrariesDeleteV1Request(BaseModel): library_id: Annotated[ str, FieldMetadata(path=PathParamMetadata(style="simple", explode=False)) ] diff --git a/src/mistralai/client/models/deletedocumentop.py b/src/mistralai/client/models/libraries_documents_delete_v1op.py similarity index 79% rename from src/mistralai/client/models/deletedocumentop.py rename to src/mistralai/client/models/libraries_documents_delete_v1op.py index 400070a4..0495832e 100644 --- a/src/mistralai/client/models/deletedocumentop.py +++ b/src/mistralai/client/models/libraries_documents_delete_v1op.py @@ -1,5 +1,5 @@ """Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" -# @generated-id: 62522db1ccf2 +# @generated-id: 81eb34382a3d from __future__ import annotations from mistralai.client.types import BaseModel @@ -7,12 +7,12 @@ from typing_extensions import Annotated, TypedDict -class DeleteDocumentRequestTypedDict(TypedDict): +class LibrariesDocumentsDeleteV1RequestTypedDict(TypedDict): library_id: str document_id: str -class DeleteDocumentRequest(BaseModel): +class LibrariesDocumentsDeleteV1Request(BaseModel): library_id: Annotated[ str, FieldMetadata(path=PathParamMetadata(style="simple", explode=False)) ] diff --git a/src/mistralai/client/models/libraries_documents_get_extracted_text_signed_url_v1op.py b/src/mistralai/client/models/libraries_documents_get_extracted_text_signed_url_v1op.py new file mode 100644 index 00000000..186baaed --- /dev/null +++ b/src/mistralai/client/models/libraries_documents_get_extracted_text_signed_url_v1op.py @@ -0,0 +1,22 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" +# @generated-id: a7417ebd6040 + +from __future__ import annotations +from mistralai.client.types import BaseModel +from mistralai.client.utils import FieldMetadata, PathParamMetadata +from typing_extensions import Annotated, TypedDict + + +class LibrariesDocumentsGetExtractedTextSignedURLV1RequestTypedDict(TypedDict): + library_id: str + document_id: str + + +class LibrariesDocumentsGetExtractedTextSignedURLV1Request(BaseModel): + library_id: Annotated[ + str, FieldMetadata(path=PathParamMetadata(style="simple", explode=False)) + ] + + document_id: Annotated[ + str, FieldMetadata(path=PathParamMetadata(style="simple", explode=False)) + ] diff --git a/src/mistralai/client/models/libraries_documents_get_signed_url_v1op.py b/src/mistralai/client/models/libraries_documents_get_signed_url_v1op.py new file mode 100644 index 00000000..ebcf85d7 --- /dev/null +++ b/src/mistralai/client/models/libraries_documents_get_signed_url_v1op.py @@ -0,0 +1,22 @@ +"""Code generated by Speakeasy 
(https://speakeasy.com). DO NOT EDIT.""" +# @generated-id: d4b7b47913ba + +from __future__ import annotations +from mistralai.client.types import BaseModel +from mistralai.client.utils import FieldMetadata, PathParamMetadata +from typing_extensions import Annotated, TypedDict + + +class LibrariesDocumentsGetSignedURLV1RequestTypedDict(TypedDict): + library_id: str + document_id: str + + +class LibrariesDocumentsGetSignedURLV1Request(BaseModel): + library_id: Annotated[ + str, FieldMetadata(path=PathParamMetadata(style="simple", explode=False)) + ] + + document_id: Annotated[ + str, FieldMetadata(path=PathParamMetadata(style="simple", explode=False)) + ] diff --git a/src/mistralai/client/models/getdocumentop.py b/src/mistralai/client/models/libraries_documents_get_status_v1op.py similarity index 78% rename from src/mistralai/client/models/getdocumentop.py rename to src/mistralai/client/models/libraries_documents_get_status_v1op.py index d7b07db7..1f484787 100644 --- a/src/mistralai/client/models/getdocumentop.py +++ b/src/mistralai/client/models/libraries_documents_get_status_v1op.py @@ -1,5 +1,5 @@ """Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" -# @generated-id: de89ff93d373 +# @generated-id: f314f73e909c from __future__ import annotations from mistralai.client.types import BaseModel @@ -7,12 +7,12 @@ from typing_extensions import Annotated, TypedDict -class GetDocumentRequestTypedDict(TypedDict): +class LibrariesDocumentsGetStatusV1RequestTypedDict(TypedDict): library_id: str document_id: str -class GetDocumentRequest(BaseModel): +class LibrariesDocumentsGetStatusV1Request(BaseModel): library_id: Annotated[ str, FieldMetadata(path=PathParamMetadata(style="simple", explode=False)) ] diff --git a/src/mistralai/client/models/getdocumentextractedtextsignedurlop.py b/src/mistralai/client/models/libraries_documents_get_text_content_v1op.py similarity index 77% rename from src/mistralai/client/models/getdocumentextractedtextsignedurlop.py rename to src/mistralai/client/models/libraries_documents_get_text_content_v1op.py index 9a71181d..e0508d66 100644 --- a/src/mistralai/client/models/getdocumentextractedtextsignedurlop.py +++ b/src/mistralai/client/models/libraries_documents_get_text_content_v1op.py @@ -1,5 +1,5 @@ """Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" -# @generated-id: 69099395d631 +# @generated-id: 1ca4e0c41321 from __future__ import annotations from mistralai.client.types import BaseModel @@ -7,12 +7,12 @@ from typing_extensions import Annotated, TypedDict -class GetDocumentExtractedTextSignedURLRequestTypedDict(TypedDict): +class LibrariesDocumentsGetTextContentV1RequestTypedDict(TypedDict): library_id: str document_id: str -class GetDocumentExtractedTextSignedURLRequest(BaseModel): +class LibrariesDocumentsGetTextContentV1Request(BaseModel): library_id: Annotated[ str, FieldMetadata(path=PathParamMetadata(style="simple", explode=False)) ] diff --git a/src/mistralai/client/models/getdocumentstatusop.py b/src/mistralai/client/models/libraries_documents_get_v1op.py similarity index 80% rename from src/mistralai/client/models/getdocumentstatusop.py rename to src/mistralai/client/models/libraries_documents_get_v1op.py index 4206f593..857dfbe6 100644 --- a/src/mistralai/client/models/getdocumentstatusop.py +++ b/src/mistralai/client/models/libraries_documents_get_v1op.py @@ -1,5 +1,5 @@ """Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" -# @generated-id: f1f40b8f003f +# @generated-id: 26ff35f0c69d from __future__ import annotations from mistralai.client.types import BaseModel @@ -7,12 +7,12 @@ from typing_extensions import Annotated, TypedDict -class GetDocumentStatusRequestTypedDict(TypedDict): +class LibrariesDocumentsGetV1RequestTypedDict(TypedDict): library_id: str document_id: str -class GetDocumentStatusRequest(BaseModel): +class LibrariesDocumentsGetV1Request(BaseModel): library_id: Annotated[ str, FieldMetadata(path=PathParamMetadata(style="simple", explode=False)) ] diff --git a/src/mistralai/client/models/listdocumentsop.py b/src/mistralai/client/models/libraries_documents_list_v1op.py similarity index 67% rename from src/mistralai/client/models/listdocumentsop.py rename to src/mistralai/client/models/libraries_documents_list_v1op.py index 0f7c4584..da7d793b 100644 --- a/src/mistralai/client/models/listdocumentsop.py +++ b/src/mistralai/client/models/libraries_documents_list_v1op.py @@ -1,5 +1,5 @@ """Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" -# @generated-id: 3e42bdc15383 +# @generated-id: 756f26de3cbe from __future__ import annotations from mistralai.client.types import ( @@ -15,7 +15,7 @@ from typing_extensions import Annotated, NotRequired, TypedDict -class ListDocumentsRequestTypedDict(TypedDict): +class LibrariesDocumentsListV1RequestTypedDict(TypedDict): library_id: str search: NotRequired[Nullable[str]] page_size: NotRequired[int] @@ -25,7 +25,7 @@ class ListDocumentsRequestTypedDict(TypedDict): sort_order: NotRequired[str] -class ListDocumentsRequest(BaseModel): +class LibrariesDocumentsListV1Request(BaseModel): library_id: Annotated[ str, FieldMetadata(path=PathParamMetadata(style="simple", explode=False)) ] @@ -62,37 +62,34 @@ class ListDocumentsRequest(BaseModel): @model_serializer(mode="wrap") def serialize_model(self, handler): - optional_fields = [ - "search", - "page_size", - "page", - "filters_attributes", - "sort_by", - "sort_order", - ] - nullable_fields = ["search", "filters_attributes"] - null_default_fields = [] - + optional_fields = set( + [ + "search", + "page_size", + "page", + "filters_attributes", + "sort_by", + "sort_order", + ] + ) + nullable_fields = set(["search", "filters_attributes"]) serialized = handler(self) - m = {} for n, f in type(self).model_fields.items(): k = f.alias or n val = serialized.get(k) - serialized.pop(k, None) - - optional_nullable = k in optional_fields and k in nullable_fields - is_set = ( - self.__pydantic_fields_set__.intersection({n}) - or k in null_default_fields - ) # pylint: disable=no-member - - if val is not None and val != UNSET_SENTINEL: - m[k] = val - elif val != UNSET_SENTINEL and ( - not k in optional_fields or (optional_nullable and is_set) - ): - m[k] = val + is_nullable_and_explicitly_set = ( + k in nullable_fields + and (self.__pydantic_fields_set__.intersection({n})) # pylint: disable=no-member + ) + + if val != UNSET_SENTINEL: + if ( + val is not None + or k not in optional_fields + or 
is_nullable_and_explicitly_set + ): + m[k] = val return m diff --git a/src/mistralai/client/models/getdocumentsignedurlop.py b/src/mistralai/client/models/libraries_documents_reprocess_v1op.py similarity index 78% rename from src/mistralai/client/models/getdocumentsignedurlop.py rename to src/mistralai/client/models/libraries_documents_reprocess_v1op.py index e5d56c54..a2f9ba2a 100644 --- a/src/mistralai/client/models/getdocumentsignedurlop.py +++ b/src/mistralai/client/models/libraries_documents_reprocess_v1op.py @@ -1,5 +1,5 @@ """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" -# @generated-id: b8d95511c6d1 +# @generated-id: dbbeb02fc336 from __future__ import annotations from mistralai.client.types import BaseModel @@ -7,12 +7,12 @@ from typing_extensions import Annotated, TypedDict -class GetDocumentSignedURLRequestTypedDict(TypedDict): +class LibrariesDocumentsReprocessV1RequestTypedDict(TypedDict): library_id: str document_id: str -class GetDocumentSignedURLRequest(BaseModel): +class LibrariesDocumentsReprocessV1Request(BaseModel): library_id: Annotated[ str, FieldMetadata(path=PathParamMetadata(style="simple", explode=False)) ] diff --git a/src/mistralai/client/models/updatedocumentop.py b/src/mistralai/client/models/libraries_documents_update_v1op.py similarity index 64% rename from src/mistralai/client/models/updatedocumentop.py rename to src/mistralai/client/models/libraries_documents_update_v1op.py index 073f22a9..7ad4231f 100644 --- a/src/mistralai/client/models/updatedocumentop.py +++ b/src/mistralai/client/models/libraries_documents_update_v1op.py @@ -1,20 +1,20 @@ """Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" -# @generated-id: eee9ef317180 +# @generated-id: 734ba6c19f5f from __future__ import annotations -from .documentupdatein import DocumentUpdateIn, DocumentUpdateInTypedDict +from .updatedocumentrequest import UpdateDocumentRequest, UpdateDocumentRequestTypedDict from mistralai.client.types import BaseModel from mistralai.client.utils import FieldMetadata, PathParamMetadata, RequestMetadata from typing_extensions import Annotated, TypedDict -class UpdateDocumentRequestTypedDict(TypedDict): +class LibrariesDocumentsUpdateV1RequestTypedDict(TypedDict): library_id: str document_id: str - document_update_in: DocumentUpdateInTypedDict + update_document_request: UpdateDocumentRequestTypedDict -class UpdateDocumentRequest(BaseModel): +class LibrariesDocumentsUpdateV1Request(BaseModel): library_id: Annotated[ str, FieldMetadata(path=PathParamMetadata(style="simple", explode=False)) ] @@ -23,7 +23,7 @@ class UpdateDocumentRequest(BaseModel): str, FieldMetadata(path=PathParamMetadata(style="simple", explode=False)) ] - document_update_in: Annotated[ - DocumentUpdateIn, + update_document_request: Annotated[ + UpdateDocumentRequest, FieldMetadata(request=RequestMetadata(media_type="application/json")), ] diff --git a/src/mistralai/client/models/uploaddocumentop.py b/src/mistralai/client/models/libraries_documents_upload_v1op.py similarity index 91% rename from src/mistralai/client/models/uploaddocumentop.py rename to src/mistralai/client/models/libraries_documents_upload_v1op.py index 2c957947..388633d1 100644 --- a/src/mistralai/client/models/uploaddocumentop.py +++ b/src/mistralai/client/models/libraries_documents_upload_v1op.py @@ -1,5 +1,5 @@ """Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" -# @generated-id: 0018fe7ff48c +# @generated-id: 744466971862 from __future__ import annotations from .file import File, FileTypedDict @@ -41,12 +41,12 @@ class DocumentUpload(BaseModel): """ -class UploadDocumentRequestTypedDict(TypedDict): +class LibrariesDocumentsUploadV1RequestTypedDict(TypedDict): library_id: str request_body: DocumentUploadTypedDict -class UploadDocumentRequest(BaseModel): +class LibrariesDocumentsUploadV1Request(BaseModel): library_id: Annotated[ str, FieldMetadata(path=PathParamMetadata(style="simple", explode=False)) ] diff --git a/src/mistralai/client/models/getlibraryop.py b/src/mistralai/client/models/libraries_get_v1op.py similarity index 77% rename from src/mistralai/client/models/getlibraryop.py rename to src/mistralai/client/models/libraries_get_v1op.py index bc0b4a23..7a51d605 100644 --- a/src/mistralai/client/models/getlibraryop.py +++ b/src/mistralai/client/models/libraries_get_v1op.py @@ -1,5 +1,5 @@ """Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" -# @generated-id: c84a92e23a90 +# @generated-id: d493f39e7ebb from __future__ import annotations from mistralai.client.types import BaseModel @@ -7,11 +7,11 @@ from typing_extensions import Annotated, TypedDict -class GetLibraryRequestTypedDict(TypedDict): +class LibrariesGetV1RequestTypedDict(TypedDict): library_id: str -class GetLibraryRequest(BaseModel): +class LibrariesGetV1Request(BaseModel): library_id: Annotated[ str, FieldMetadata(path=PathParamMetadata(style="simple", explode=False)) ] diff --git a/src/mistralai/client/models/updateorcreatelibraryaccessop.py b/src/mistralai/client/models/libraries_share_create_v1op.py similarity index 81% rename from src/mistralai/client/models/updateorcreatelibraryaccessop.py rename to src/mistralai/client/models/libraries_share_create_v1op.py index 1abe6eda..00ea7482 100644 --- a/src/mistralai/client/models/updateorcreatelibraryaccessop.py +++ b/src/mistralai/client/models/libraries_share_create_v1op.py @@ -1,5 +1,5 @@ """Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" -# @generated-id: ec9b15418f5c +# @generated-id: feaacfd46dd3 from __future__ import annotations from .sharingin import SharingIn, SharingInTypedDict @@ -8,12 +8,12 @@ from typing_extensions import Annotated, TypedDict -class UpdateOrCreateLibraryAccessRequestTypedDict(TypedDict): +class LibrariesShareCreateV1RequestTypedDict(TypedDict): library_id: str sharing_in: SharingInTypedDict -class UpdateOrCreateLibraryAccessRequest(BaseModel): +class LibrariesShareCreateV1Request(BaseModel): library_id: Annotated[ str, FieldMetadata(path=PathParamMetadata(style="simple", explode=False)) ] diff --git a/src/mistralai/client/models/deletelibraryaccessop.py b/src/mistralai/client/models/libraries_share_delete_v1op.py similarity index 83% rename from src/mistralai/client/models/deletelibraryaccessop.py rename to src/mistralai/client/models/libraries_share_delete_v1op.py index ca14c3ff..eca3f86a 100644 --- a/src/mistralai/client/models/deletelibraryaccessop.py +++ b/src/mistralai/client/models/libraries_share_delete_v1op.py @@ -1,5 +1,5 @@ """Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" -# @generated-id: df80945bcf19 +# @generated-id: 7f3a679ca384 from __future__ import annotations from .sharingdelete import SharingDelete, SharingDeleteTypedDict @@ -8,12 +8,12 @@ from typing_extensions import Annotated, TypedDict -class DeleteLibraryAccessRequestTypedDict(TypedDict): +class LibrariesShareDeleteV1RequestTypedDict(TypedDict): library_id: str sharing_delete: SharingDeleteTypedDict -class DeleteLibraryAccessRequest(BaseModel): +class LibrariesShareDeleteV1Request(BaseModel): library_id: Annotated[ str, FieldMetadata(path=PathParamMetadata(style="simple", explode=False)) ] diff --git a/src/mistralai/client/models/listlibraryaccessesop.py b/src/mistralai/client/models/libraries_share_list_v1op.py similarity index 75% rename from src/mistralai/client/models/listlibraryaccessesop.py rename to src/mistralai/client/models/libraries_share_list_v1op.py index 2206310f..895a2590 100644 --- a/src/mistralai/client/models/listlibraryaccessesop.py +++ b/src/mistralai/client/models/libraries_share_list_v1op.py @@ -1,5 +1,5 @@ """Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" -# @generated-id: 581b332626b7 +# @generated-id: 8f0af379bf1c from __future__ import annotations from mistralai.client.types import BaseModel @@ -7,11 +7,11 @@ from typing_extensions import Annotated, TypedDict -class ListLibraryAccessesRequestTypedDict(TypedDict): +class LibrariesShareListV1RequestTypedDict(TypedDict): library_id: str -class ListLibraryAccessesRequest(BaseModel): +class LibrariesShareListV1Request(BaseModel): library_id: Annotated[ str, FieldMetadata(path=PathParamMetadata(style="simple", explode=False)) ] diff --git a/src/mistralai/client/models/updatelibraryop.py b/src/mistralai/client/models/libraries_update_v1op.py similarity index 60% rename from src/mistralai/client/models/updatelibraryop.py rename to src/mistralai/client/models/libraries_update_v1op.py index c5a1ad30..54b0ab70 100644 --- a/src/mistralai/client/models/updatelibraryop.py +++ b/src/mistralai/client/models/libraries_update_v1op.py @@ -1,24 +1,24 @@ """Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" -# @generated-id: 4ba7acdb62c6 +# @generated-id: 92c8d4132252 from __future__ import annotations -from .libraryinupdate import LibraryInUpdate, LibraryInUpdateTypedDict +from .updatelibraryrequest import UpdateLibraryRequest, UpdateLibraryRequestTypedDict from mistralai.client.types import BaseModel from mistralai.client.utils import FieldMetadata, PathParamMetadata, RequestMetadata from typing_extensions import Annotated, TypedDict -class UpdateLibraryRequestTypedDict(TypedDict): +class LibrariesUpdateV1RequestTypedDict(TypedDict): library_id: str - library_in_update: LibraryInUpdateTypedDict + update_library_request: UpdateLibraryRequestTypedDict -class UpdateLibraryRequest(BaseModel): +class LibrariesUpdateV1Request(BaseModel): library_id: Annotated[ str, FieldMetadata(path=PathParamMetadata(style="simple", explode=False)) ] - library_in_update: Annotated[ - LibraryInUpdate, + update_library_request: Annotated[ + UpdateLibraryRequest, FieldMetadata(request=RequestMetadata(media_type="application/json")), ] diff --git a/src/mistralai/client/models/libraryout.py b/src/mistralai/client/models/library.py similarity index 58% rename from src/mistralai/client/models/libraryout.py rename to src/mistralai/client/models/library.py index c7ab7b8d..1953b6fb 100644 --- a/src/mistralai/client/models/libraryout.py +++ b/src/mistralai/client/models/library.py @@ -1,5 +1,5 @@ """Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" -# @generated-id: 4e608c7aafc4 +# @generated-id: 028a34b08f9c from __future__ import annotations from datetime import datetime @@ -14,7 +14,7 @@ from typing_extensions import NotRequired, TypedDict -class LibraryOutTypedDict(TypedDict): +class LibraryTypedDict(TypedDict): id: str name: str created_at: datetime @@ -34,7 +34,7 @@ class LibraryOutTypedDict(TypedDict): r"""Generated Name""" -class LibraryOut(BaseModel): +class Library(BaseModel): id: str name: str @@ -70,48 +70,47 @@ class LibraryOut(BaseModel): @model_serializer(mode="wrap") def serialize_model(self, handler): - optional_fields = [ - "emoji", - "description", - "generated_description", - "explicit_user_members_count", - "explicit_workspace_members_count", - "org_sharing_role", - "generated_name", - ] - nullable_fields = [ - "owner_id", - "chunk_size", - "emoji", - "description", - "generated_description", - "explicit_user_members_count", - "explicit_workspace_members_count", - "org_sharing_role", - "generated_name", - ] - null_default_fields = [] - + optional_fields = set( + [ + "emoji", + "description", + "generated_description", + "explicit_user_members_count", + "explicit_workspace_members_count", + "org_sharing_role", + "generated_name", + ] + ) + nullable_fields = set( + [ + "owner_id", + "chunk_size", + "emoji", + "description", + "generated_description", + "explicit_user_members_count", + "explicit_workspace_members_count", + "org_sharing_role", + "generated_name", + ] + ) serialized = handler(self) - m = {} for n, f in type(self).model_fields.items(): k = f.alias or n val = serialized.get(k) - serialized.pop(k, None) - - optional_nullable = k in optional_fields and k in nullable_fields - is_set = ( - self.__pydantic_fields_set__.intersection({n}) - or k in null_default_fields - ) # pylint: disable=no-member - - if val is not None and val != UNSET_SENTINEL: - m[k] = val - elif val != UNSET_SENTINEL and ( - not k in optional_fields or (optional_nullable and is_set) - ): - m[k] = 
val + is_nullable_and_explicitly_set = ( + k in nullable_fields + and (self.__pydantic_fields_set__.intersection({n})) # pylint: disable=no-member + ) + + if val != UNSET_SENTINEL: + if ( + val is not None + or k not in optional_fields + or is_nullable_and_explicitly_set + ): + m[k] = val return m diff --git a/src/mistralai/client/models/libraryinupdate.py b/src/mistralai/client/models/libraryinupdate.py deleted file mode 100644 index 328b2de3..00000000 --- a/src/mistralai/client/models/libraryinupdate.py +++ /dev/null @@ -1,54 +0,0 @@ -"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" -# @generated-id: 300a6bb02e6e - -from __future__ import annotations -from mistralai.client.types import ( - BaseModel, - Nullable, - OptionalNullable, - UNSET, - UNSET_SENTINEL, -) -from pydantic import model_serializer -from typing_extensions import NotRequired, TypedDict - - -class LibraryInUpdateTypedDict(TypedDict): - name: NotRequired[Nullable[str]] - description: NotRequired[Nullable[str]] - - -class LibraryInUpdate(BaseModel): - name: OptionalNullable[str] = UNSET - - description: OptionalNullable[str] = UNSET - - @model_serializer(mode="wrap") - def serialize_model(self, handler): - optional_fields = ["name", "description"] - nullable_fields = ["name", "description"] - null_default_fields = [] - - serialized = handler(self) - - m = {} - - for n, f in type(self).model_fields.items(): - k = f.alias or n - val = serialized.get(k) - serialized.pop(k, None) - - optional_nullable = k in optional_fields and k in nullable_fields - is_set = ( - self.__pydantic_fields_set__.intersection({n}) - or k in null_default_fields - ) # pylint: disable=no-member - - if val is not None and val != UNSET_SENTINEL: - m[k] = val - elif val != UNSET_SENTINEL and ( - not k in optional_fields or (optional_nullable and is_set) - ): - m[k] = val - - return m diff --git a/src/mistralai/client/models/listbatchjobsresponse.py b/src/mistralai/client/models/listbatchjobsresponse.py new 
file mode 100644 index 00000000..35a348a1 --- /dev/null +++ b/src/mistralai/client/models/listbatchjobsresponse.py @@ -0,0 +1,51 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" +# @generated-id: 99d94c86a871 + +from __future__ import annotations +from .batchjob import BatchJob, BatchJobTypedDict +from mistralai.client.types import BaseModel, UNSET_SENTINEL +from mistralai.client.utils import validate_const +import pydantic +from pydantic import model_serializer +from pydantic.functional_validators import AfterValidator +from typing import List, Literal, Optional +from typing_extensions import Annotated, NotRequired, TypedDict + + +class ListBatchJobsResponseTypedDict(TypedDict): + total: int + data: NotRequired[List[BatchJobTypedDict]] + object: Literal["list"] + + +class ListBatchJobsResponse(BaseModel): + total: int + + data: Optional[List[BatchJob]] = None + + object: Annotated[ + Annotated[Optional[Literal["list"]], AfterValidator(validate_const("list"))], + pydantic.Field(alias="object"), + ] = "list" + + @model_serializer(mode="wrap") + def serialize_model(self, handler): + optional_fields = set(["data", "object"]) + serialized = handler(self) + m = {} + + for n, f in type(self).model_fields.items(): + k = f.alias or n + val = serialized.get(k) + + if val != UNSET_SENTINEL: + if val is not None or k not in optional_fields: + m[k] = val + + return m + + +try: + ListBatchJobsResponse.model_rebuild() +except NameError: + pass diff --git a/src/mistralai/client/models/listdocumentout.py b/src/mistralai/client/models/listdocumentsresponse.py similarity index 60% rename from src/mistralai/client/models/listdocumentout.py rename to src/mistralai/client/models/listdocumentsresponse.py index a636b3de..c48b8c05 100644 --- a/src/mistralai/client/models/listdocumentout.py +++ b/src/mistralai/client/models/listdocumentsresponse.py @@ -1,20 +1,20 @@ """Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" -# @generated-id: b2c96075ce00 +# @generated-id: f593d8e66833 from __future__ import annotations -from .documentout import DocumentOut, DocumentOutTypedDict +from .document import Document, DocumentTypedDict from .paginationinfo import PaginationInfo, PaginationInfoTypedDict from mistralai.client.types import BaseModel from typing import List from typing_extensions import TypedDict -class ListDocumentOutTypedDict(TypedDict): +class ListDocumentsResponseTypedDict(TypedDict): pagination: PaginationInfoTypedDict - data: List[DocumentOutTypedDict] + data: List[DocumentTypedDict] -class ListDocumentOut(BaseModel): +class ListDocumentsResponse(BaseModel): pagination: PaginationInfo - data: List[DocumentOut] + data: List[Document] diff --git a/src/mistralai/client/models/listfilesout.py b/src/mistralai/client/models/listfilesresponse.py similarity index 53% rename from src/mistralai/client/models/listfilesout.py rename to src/mistralai/client/models/listfilesresponse.py index 460822f7..10a60126 100644 --- a/src/mistralai/client/models/listfilesout.py +++ b/src/mistralai/client/models/listfilesresponse.py @@ -1,5 +1,5 @@ """Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" -# @generated-id: ae5fa21b141c +# @generated-id: 85d6d24c1a19 from __future__ import annotations from .fileschema import FileSchema, FileSchemaTypedDict @@ -15,13 +15,13 @@ from typing_extensions import NotRequired, TypedDict -class ListFilesOutTypedDict(TypedDict): +class ListFilesResponseTypedDict(TypedDict): data: List[FileSchemaTypedDict] object: str total: NotRequired[Nullable[int]] -class ListFilesOut(BaseModel): +class ListFilesResponse(BaseModel): data: List[FileSchema] object: str @@ -30,30 +30,25 @@ class ListFilesOut(BaseModel): @model_serializer(mode="wrap") def serialize_model(self, handler): - optional_fields = ["total"] - nullable_fields = ["total"] - null_default_fields = [] - + optional_fields = set(["total"]) + nullable_fields = set(["total"]) serialized = handler(self) - m = {} for n, f in type(self).model_fields.items(): k = f.alias or n val = serialized.get(k) - serialized.pop(k, None) - - optional_nullable = k in optional_fields and k in nullable_fields - is_set = ( - self.__pydantic_fields_set__.intersection({n}) - or k in null_default_fields - ) # pylint: disable=no-member - - if val is not None and val != UNSET_SENTINEL: - m[k] = val - elif val != UNSET_SENTINEL and ( - not k in optional_fields or (optional_nullable and is_set) - ): - m[k] = val + is_nullable_and_explicitly_set = ( + k in nullable_fields + and (self.__pydantic_fields_set__.intersection({n})) # pylint: disable=no-member + ) + + if val != UNSET_SENTINEL: + if ( + val is not None + or k not in optional_fields + or is_nullable_and_explicitly_set + ): + m[k] = val return m diff --git a/src/mistralai/client/models/listfinetuningjobsresponse.py b/src/mistralai/client/models/listfinetuningjobsresponse.py new file mode 100644 index 00000000..1e434c59 --- /dev/null +++ b/src/mistralai/client/models/listfinetuningjobsresponse.py @@ -0,0 +1,100 @@ +"""Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" +# @generated-id: 118e05dbfbbd + +from __future__ import annotations +from .classifierfinetuningjob import ( + ClassifierFineTuningJob, + ClassifierFineTuningJobTypedDict, +) +from .completionfinetuningjob import ( + CompletionFineTuningJob, + CompletionFineTuningJobTypedDict, +) +from functools import partial +from mistralai.client.types import BaseModel, UNSET_SENTINEL +from mistralai.client.utils import validate_const +from mistralai.client.utils.unions import parse_open_union +import pydantic +from pydantic import ConfigDict, model_serializer +from pydantic.functional_validators import AfterValidator, BeforeValidator +from typing import Any, List, Literal, Optional, Union +from typing_extensions import Annotated, NotRequired, TypeAliasType, TypedDict + + +ListFineTuningJobsResponseDataTypedDict = TypeAliasType( + "ListFineTuningJobsResponseDataTypedDict", + Union[ClassifierFineTuningJobTypedDict, CompletionFineTuningJobTypedDict], +) + + +class UnknownListFineTuningJobsResponseData(BaseModel): + r"""A ListFineTuningJobsResponseData variant the SDK doesn't recognize. 
Preserves the raw payload.""" + + job_type: Literal["UNKNOWN"] = "UNKNOWN" + raw: Any + is_unknown: Literal[True] = True + + model_config = ConfigDict(frozen=True) + + +_LIST_FINE_TUNING_JOBS_RESPONSE_DATA_VARIANTS: dict[str, Any] = { + "classifier": ClassifierFineTuningJob, + "completion": CompletionFineTuningJob, +} + + +ListFineTuningJobsResponseData = Annotated[ + Union[ + ClassifierFineTuningJob, + CompletionFineTuningJob, + UnknownListFineTuningJobsResponseData, + ], + BeforeValidator( + partial( + parse_open_union, + disc_key="job_type", + variants=_LIST_FINE_TUNING_JOBS_RESPONSE_DATA_VARIANTS, + unknown_cls=UnknownListFineTuningJobsResponseData, + union_name="ListFineTuningJobsResponseData", + ) + ), +] + + +class ListFineTuningJobsResponseTypedDict(TypedDict): + total: int + data: NotRequired[List[ListFineTuningJobsResponseDataTypedDict]] + object: Literal["list"] + + +class ListFineTuningJobsResponse(BaseModel): + total: int + + data: Optional[List[ListFineTuningJobsResponseData]] = None + + object: Annotated[ + Annotated[Optional[Literal["list"]], AfterValidator(validate_const("list"))], + pydantic.Field(alias="object"), + ] = "list" + + @model_serializer(mode="wrap") + def serialize_model(self, handler): + optional_fields = set(["data", "object"]) + serialized = handler(self) + m = {} + + for n, f in type(self).model_fields.items(): + k = f.alias or n + val = serialized.get(k) + + if val != UNSET_SENTINEL: + if val is not None or k not in optional_fields: + m[k] = val + + return m + + +try: + ListFineTuningJobsResponse.model_rebuild() +except NameError: + pass diff --git a/src/mistralai/client/models/listlibrariesresponse.py b/src/mistralai/client/models/listlibrariesresponse.py new file mode 100644 index 00000000..337fe105 --- /dev/null +++ b/src/mistralai/client/models/listlibrariesresponse.py @@ -0,0 +1,16 @@ +"""Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" +# @generated-id: df556a618365 + +from __future__ import annotations +from .library import Library, LibraryTypedDict +from mistralai.client.types import BaseModel +from typing import List +from typing_extensions import TypedDict + + +class ListLibrariesResponseTypedDict(TypedDict): + data: List[LibraryTypedDict] + + +class ListLibrariesResponse(BaseModel): + data: List[Library] diff --git a/src/mistralai/client/models/listlibraryout.py b/src/mistralai/client/models/listlibraryout.py deleted file mode 100644 index 39fa459f..00000000 --- a/src/mistralai/client/models/listlibraryout.py +++ /dev/null @@ -1,16 +0,0 @@ -"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" -# @generated-id: cb78c529e763 - -from __future__ import annotations -from .libraryout import LibraryOut, LibraryOutTypedDict -from mistralai.client.types import BaseModel -from typing import List -from typing_extensions import TypedDict - - -class ListLibraryOutTypedDict(TypedDict): - data: List[LibraryOutTypedDict] - - -class ListLibraryOut(BaseModel): - data: List[LibraryOut] diff --git a/src/mistralai/client/models/messageinputcontentchunks.py b/src/mistralai/client/models/messageinputcontentchunks.py index 63cf14e7..1e04ce24 100644 --- a/src/mistralai/client/models/messageinputcontentchunks.py +++ b/src/mistralai/client/models/messageinputcontentchunks.py @@ -2,10 +2,13 @@ # @generated-id: 01025c12866a from __future__ import annotations +from .conversationthinkchunk import ( + ConversationThinkChunk, + ConversationThinkChunkTypedDict, +) from .documenturlchunk import DocumentURLChunk, DocumentURLChunkTypedDict from .imageurlchunk import ImageURLChunk, ImageURLChunkTypedDict from .textchunk import TextChunk, TextChunkTypedDict -from .thinkchunk import ThinkChunk, ThinkChunkTypedDict from .toolfilechunk import ToolFileChunk, ToolFileChunkTypedDict from typing import Union from typing_extensions import TypeAliasType @@ -17,7 +20,7 @@ TextChunkTypedDict, 
ImageURLChunkTypedDict, DocumentURLChunkTypedDict, - ThinkChunkTypedDict, + ConversationThinkChunkTypedDict, ToolFileChunkTypedDict, ], ) @@ -25,5 +28,11 @@ MessageInputContentChunks = TypeAliasType( "MessageInputContentChunks", - Union[TextChunk, ImageURLChunk, DocumentURLChunk, ThinkChunk, ToolFileChunk], + Union[ + TextChunk, + ImageURLChunk, + DocumentURLChunk, + ConversationThinkChunk, + ToolFileChunk, + ], ) diff --git a/src/mistralai/client/models/messageinputentry.py b/src/mistralai/client/models/messageinputentry.py index 15046d25..c948a13e 100644 --- a/src/mistralai/client/models/messageinputentry.py +++ b/src/mistralai/client/models/messageinputentry.py @@ -15,18 +15,15 @@ UNSET_SENTINEL, UnrecognizedStr, ) +from mistralai.client.utils import validate_const +import pydantic from pydantic import model_serializer +from pydantic.functional_validators import AfterValidator from typing import List, Literal, Optional, Union -from typing_extensions import NotRequired, TypeAliasType, TypedDict +from typing_extensions import Annotated, NotRequired, TypeAliasType, TypedDict -MessageInputEntryObject = Literal["entry",] - - -MessageInputEntryType = Literal["message.input",] - - -MessageInputEntryRole = Union[ +Role = Union[ Literal[ "assistant", "user", @@ -49,10 +46,10 @@ class MessageInputEntryTypedDict(TypedDict): r"""Representation of an input message inside the conversation.""" - role: MessageInputEntryRole + role: Role content: MessageInputEntryContentTypedDict - object: NotRequired[MessageInputEntryObject] - type: NotRequired[MessageInputEntryType] + object: Literal["entry"] + type: Literal["message.input"] created_at: NotRequired[datetime] completed_at: NotRequired[Nullable[datetime]] id: NotRequired[str] @@ -62,13 +59,22 @@ class MessageInputEntryTypedDict(TypedDict): class MessageInputEntry(BaseModel): r"""Representation of an input message inside the conversation.""" - role: MessageInputEntryRole + role: Role content: MessageInputEntryContent - object: 
Optional[MessageInputEntryObject] = "entry" + object: Annotated[ + Annotated[Optional[Literal["entry"]], AfterValidator(validate_const("entry"))], + pydantic.Field(alias="object"), + ] = "entry" - type: Optional[MessageInputEntryType] = "message.input" + type: Annotated[ + Annotated[ + Optional[Literal["message.input"]], + AfterValidator(validate_const("message.input")), + ], + pydantic.Field(alias="type"), + ] = "message.input" created_at: Optional[datetime] = None @@ -80,37 +86,33 @@ class MessageInputEntry(BaseModel): @model_serializer(mode="wrap") def serialize_model(self, handler): - optional_fields = [ - "object", - "type", - "created_at", - "completed_at", - "id", - "prefix", - ] - nullable_fields = ["completed_at"] - null_default_fields = [] - + optional_fields = set( + ["object", "type", "created_at", "completed_at", "id", "prefix"] + ) + nullable_fields = set(["completed_at"]) serialized = handler(self) - m = {} for n, f in type(self).model_fields.items(): k = f.alias or n val = serialized.get(k) - serialized.pop(k, None) + is_nullable_and_explicitly_set = ( + k in nullable_fields + and (self.__pydantic_fields_set__.intersection({n})) # pylint: disable=no-member + ) + + if val != UNSET_SENTINEL: + if ( + val is not None + or k not in optional_fields + or is_nullable_and_explicitly_set + ): + m[k] = val - optional_nullable = k in optional_fields and k in nullable_fields - is_set = ( - self.__pydantic_fields_set__.intersection({n}) - or k in null_default_fields - ) # pylint: disable=no-member + return m - if val is not None and val != UNSET_SENTINEL: - m[k] = val - elif val != UNSET_SENTINEL and ( - not k in optional_fields or (optional_nullable and is_set) - ): - m[k] = val - return m +try: + MessageInputEntry.model_rebuild() +except NameError: + pass diff --git a/src/mistralai/client/models/messageoutputcontentchunks.py b/src/mistralai/client/models/messageoutputcontentchunks.py index def7a4d2..bf455d17 100644 --- 
a/src/mistralai/client/models/messageoutputcontentchunks.py +++ b/src/mistralai/client/models/messageoutputcontentchunks.py @@ -2,10 +2,13 @@ # @generated-id: 2ed248515035 from __future__ import annotations +from .conversationthinkchunk import ( + ConversationThinkChunk, + ConversationThinkChunkTypedDict, +) from .documenturlchunk import DocumentURLChunk, DocumentURLChunkTypedDict from .imageurlchunk import ImageURLChunk, ImageURLChunkTypedDict from .textchunk import TextChunk, TextChunkTypedDict -from .thinkchunk import ThinkChunk, ThinkChunkTypedDict from .toolfilechunk import ToolFileChunk, ToolFileChunkTypedDict from .toolreferencechunk import ToolReferenceChunk, ToolReferenceChunkTypedDict from typing import Union @@ -18,7 +21,7 @@ TextChunkTypedDict, ImageURLChunkTypedDict, DocumentURLChunkTypedDict, - ThinkChunkTypedDict, + ConversationThinkChunkTypedDict, ToolFileChunkTypedDict, ToolReferenceChunkTypedDict, ], @@ -31,7 +34,7 @@ TextChunk, ImageURLChunk, DocumentURLChunk, - ThinkChunk, + ConversationThinkChunk, ToolFileChunk, ToolReferenceChunk, ], diff --git a/src/mistralai/client/models/messageoutputentry.py b/src/mistralai/client/models/messageoutputentry.py index 8752fc36..6a9c52ed 100644 --- a/src/mistralai/client/models/messageoutputentry.py +++ b/src/mistralai/client/models/messageoutputentry.py @@ -14,18 +14,12 @@ UNSET, UNSET_SENTINEL, ) +from mistralai.client.utils import validate_const +import pydantic from pydantic import model_serializer +from pydantic.functional_validators import AfterValidator from typing import List, Literal, Optional, Union -from typing_extensions import NotRequired, TypeAliasType, TypedDict - - -MessageOutputEntryObject = Literal["entry",] - - -MessageOutputEntryType = Literal["message.output",] - - -MessageOutputEntryRole = Literal["assistant",] +from typing_extensions import Annotated, NotRequired, TypeAliasType, TypedDict MessageOutputEntryContentTypedDict = TypeAliasType( @@ -41,70 +35,87 @@ class 
MessageOutputEntryTypedDict(TypedDict): content: MessageOutputEntryContentTypedDict - object: NotRequired[MessageOutputEntryObject] - type: NotRequired[MessageOutputEntryType] + object: Literal["entry"] + type: Literal["message.output"] created_at: NotRequired[datetime] completed_at: NotRequired[Nullable[datetime]] - id: NotRequired[str] agent_id: NotRequired[Nullable[str]] model: NotRequired[Nullable[str]] - role: NotRequired[MessageOutputEntryRole] + id: NotRequired[str] + role: Literal["assistant"] class MessageOutputEntry(BaseModel): content: MessageOutputEntryContent - object: Optional[MessageOutputEntryObject] = "entry" + object: Annotated[ + Annotated[Optional[Literal["entry"]], AfterValidator(validate_const("entry"))], + pydantic.Field(alias="object"), + ] = "entry" - type: Optional[MessageOutputEntryType] = "message.output" + type: Annotated[ + Annotated[ + Optional[Literal["message.output"]], + AfterValidator(validate_const("message.output")), + ], + pydantic.Field(alias="type"), + ] = "message.output" created_at: Optional[datetime] = None completed_at: OptionalNullable[datetime] = UNSET - id: Optional[str] = None - agent_id: OptionalNullable[str] = UNSET model: OptionalNullable[str] = UNSET - role: Optional[MessageOutputEntryRole] = "assistant" + id: Optional[str] = None + + role: Annotated[ + Annotated[ + Optional[Literal["assistant"]], AfterValidator(validate_const("assistant")) + ], + pydantic.Field(alias="role"), + ] = "assistant" @model_serializer(mode="wrap") def serialize_model(self, handler): - optional_fields = [ - "object", - "type", - "created_at", - "completed_at", - "id", - "agent_id", - "model", - "role", - ] - nullable_fields = ["completed_at", "agent_id", "model"] - null_default_fields = [] - + optional_fields = set( + [ + "object", + "type", + "created_at", + "completed_at", + "agent_id", + "model", + "id", + "role", + ] + ) + nullable_fields = set(["completed_at", "agent_id", "model"]) serialized = handler(self) - m = {} for n, f in 
type(self).model_fields.items(): k = f.alias or n val = serialized.get(k) - serialized.pop(k, None) + is_nullable_and_explicitly_set = ( + k in nullable_fields + and (self.__pydantic_fields_set__.intersection({n})) # pylint: disable=no-member + ) + + if val != UNSET_SENTINEL: + if ( + val is not None + or k not in optional_fields + or is_nullable_and_explicitly_set + ): + m[k] = val - optional_nullable = k in optional_fields and k in nullable_fields - is_set = ( - self.__pydantic_fields_set__.intersection({n}) - or k in null_default_fields - ) # pylint: disable=no-member + return m - if val is not None and val != UNSET_SENTINEL: - m[k] = val - elif val != UNSET_SENTINEL and ( - not k in optional_fields or (optional_nullable and is_set) - ): - m[k] = val - return m +try: + MessageOutputEntry.model_rebuild() +except NameError: + pass diff --git a/src/mistralai/client/models/messageoutputevent.py b/src/mistralai/client/models/messageoutputevent.py index 39c10139..d765f4fd 100644 --- a/src/mistralai/client/models/messageoutputevent.py +++ b/src/mistralai/client/models/messageoutputevent.py @@ -19,9 +19,6 @@ from typing_extensions import Annotated, NotRequired, TypeAliasType, TypedDict -MessageOutputEventRole = Literal["assistant",] - - MessageOutputEventContentTypedDict = TypeAliasType( "MessageOutputEventContentTypedDict", Union[str, OutputContentChunksTypedDict] ) @@ -41,7 +38,7 @@ class MessageOutputEventTypedDict(TypedDict): content_index: NotRequired[int] model: NotRequired[Nullable[str]] agent_id: NotRequired[Nullable[str]] - role: NotRequired[MessageOutputEventRole] + role: Literal["assistant"] class MessageOutputEvent(BaseModel): @@ -49,7 +46,7 @@ class MessageOutputEvent(BaseModel): content: MessageOutputEventContent - TYPE: Annotated[ + type: Annotated[ Annotated[ Literal["message.output.delta"], AfterValidator(validate_const("message.output.delta")), @@ -67,41 +64,42 @@ class MessageOutputEvent(BaseModel): agent_id: OptionalNullable[str] = UNSET - role: 
Optional[MessageOutputEventRole] = "assistant" + role: Annotated[ + Annotated[ + Optional[Literal["assistant"]], AfterValidator(validate_const("assistant")) + ], + pydantic.Field(alias="role"), + ] = "assistant" @model_serializer(mode="wrap") def serialize_model(self, handler): - optional_fields = [ - "created_at", - "output_index", - "content_index", - "model", - "agent_id", - "role", - ] - nullable_fields = ["model", "agent_id"] - null_default_fields = [] - + optional_fields = set( + ["created_at", "output_index", "content_index", "model", "agent_id", "role"] + ) + nullable_fields = set(["model", "agent_id"]) serialized = handler(self) - m = {} for n, f in type(self).model_fields.items(): k = f.alias or n val = serialized.get(k) - serialized.pop(k, None) + is_nullable_and_explicitly_set = ( + k in nullable_fields + and (self.__pydantic_fields_set__.intersection({n})) # pylint: disable=no-member + ) + + if val != UNSET_SENTINEL: + if ( + val is not None + or k not in optional_fields + or is_nullable_and_explicitly_set + ): + m[k] = val - optional_nullable = k in optional_fields and k in nullable_fields - is_set = ( - self.__pydantic_fields_set__.intersection({n}) - or k in null_default_fields - ) # pylint: disable=no-member + return m - if val is not None and val != UNSET_SENTINEL: - m[k] = val - elif val != UNSET_SENTINEL and ( - not k in optional_fields or (optional_nullable and is_set) - ): - m[k] = val - return m +try: + MessageOutputEvent.model_rebuild() +except NameError: + pass diff --git a/src/mistralai/client/models/metricout.py b/src/mistralai/client/models/metric.py similarity index 60% rename from src/mistralai/client/models/metricout.py rename to src/mistralai/client/models/metric.py index 5705c712..1413f589 100644 --- a/src/mistralai/client/models/metricout.py +++ b/src/mistralai/client/models/metric.py @@ -1,5 +1,5 @@ """Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" -# @generated-id: 92d33621dda7 +# @generated-id: c6a65acdd1a2 from __future__ import annotations from mistralai.client.types import ( @@ -13,7 +13,7 @@ from typing_extensions import NotRequired, TypedDict -class MetricOutTypedDict(TypedDict): +class MetricTypedDict(TypedDict): r"""Metrics at the step number during the fine-tuning job. Use these metrics to assess if the training is going smoothly (loss should decrease, token accuracy should increase).""" train_loss: NotRequired[Nullable[float]] @@ -21,7 +21,7 @@ class MetricOutTypedDict(TypedDict): valid_mean_token_accuracy: NotRequired[Nullable[float]] -class MetricOut(BaseModel): +class Metric(BaseModel): r"""Metrics at the step number during the fine-tuning job. Use these metrics to assess if the training is going smoothly (loss should decrease, token accuracy should increase).""" train_loss: OptionalNullable[float] = UNSET @@ -32,30 +32,25 @@ class MetricOut(BaseModel): @model_serializer(mode="wrap") def serialize_model(self, handler): - optional_fields = ["train_loss", "valid_loss", "valid_mean_token_accuracy"] - nullable_fields = ["train_loss", "valid_loss", "valid_mean_token_accuracy"] - null_default_fields = [] - + optional_fields = set(["train_loss", "valid_loss", "valid_mean_token_accuracy"]) + nullable_fields = set(["train_loss", "valid_loss", "valid_mean_token_accuracy"]) serialized = handler(self) - m = {} for n, f in type(self).model_fields.items(): k = f.alias or n val = serialized.get(k) - serialized.pop(k, None) - - optional_nullable = k in optional_fields and k in nullable_fields - is_set = ( - self.__pydantic_fields_set__.intersection({n}) - or k in null_default_fields - ) # pylint: disable=no-member - - if val is not None and val != UNSET_SENTINEL: - m[k] = val - elif val != UNSET_SENTINEL and ( - not k in optional_fields or (optional_nullable and is_set) - ): - m[k] = val + is_nullable_and_explicitly_set = ( + k in nullable_fields + and 
(self.__pydantic_fields_set__.intersection({n})) # pylint: disable=no-member + ) + + if val != UNSET_SENTINEL: + if ( + val is not None + or k not in optional_fields + or is_nullable_and_explicitly_set + ): + m[k] = val return m diff --git a/src/mistralai/client/models/modelcapabilities.py b/src/mistralai/client/models/modelcapabilities.py index c329efbc..d9293ccc 100644 --- a/src/mistralai/client/models/modelcapabilities.py +++ b/src/mistralai/client/models/modelcapabilities.py @@ -2,7 +2,8 @@ # @generated-id: 64d8a422ea29 from __future__ import annotations -from mistralai.client.types import BaseModel +from mistralai.client.types import BaseModel, UNSET_SENTINEL +from pydantic import model_serializer from typing import Optional from typing_extensions import NotRequired, TypedDict @@ -40,3 +41,32 @@ class ModelCapabilities(BaseModel): audio: Optional[bool] = False audio_transcription: Optional[bool] = False + + @model_serializer(mode="wrap") + def serialize_model(self, handler): + optional_fields = set( + [ + "completion_chat", + "function_calling", + "completion_fim", + "fine_tuning", + "vision", + "ocr", + "classification", + "moderation", + "audio", + "audio_transcription", + ] + ) + serialized = handler(self) + m = {} + + for n, f in type(self).model_fields.items(): + k = f.alias or n + val = serialized.get(k) + + if val != UNSET_SENTINEL: + if val is not None or k not in optional_fields: + m[k] = val + + return m diff --git a/src/mistralai/client/models/modelconversation.py b/src/mistralai/client/models/modelconversation.py index c0bacb7f..bb33d2e0 100644 --- a/src/mistralai/client/models/modelconversation.py +++ b/src/mistralai/client/models/modelconversation.py @@ -10,6 +10,7 @@ from .websearchpremiumtool import WebSearchPremiumTool, WebSearchPremiumToolTypedDict from .websearchtool import WebSearchTool, WebSearchToolTypedDict from datetime import datetime +from functools import partial from mistralai.client.types import ( BaseModel, Nullable, @@ -17,7 
+18,11 @@ UNSET, UNSET_SENTINEL, ) -from pydantic import Field, model_serializer +from mistralai.client.utils import validate_const +from mistralai.client.utils.unions import parse_open_union +import pydantic +from pydantic import ConfigDict, model_serializer +from pydantic.functional_validators import AfterValidator, BeforeValidator from typing import Any, Dict, List, Literal, Optional, Union from typing_extensions import Annotated, NotRequired, TypeAliasType, TypedDict @@ -25,16 +30,36 @@ ModelConversationToolTypedDict = TypeAliasType( "ModelConversationToolTypedDict", Union[ + FunctionToolTypedDict, WebSearchToolTypedDict, WebSearchPremiumToolTypedDict, CodeInterpreterToolTypedDict, ImageGenerationToolTypedDict, - FunctionToolTypedDict, DocumentLibraryToolTypedDict, ], ) +class UnknownModelConversationTool(BaseModel): + r"""A ModelConversationTool variant the SDK doesn't recognize. Preserves the raw payload.""" + + type: Literal["UNKNOWN"] = "UNKNOWN" + raw: Any + is_unknown: Literal[True] = True + + model_config = ConfigDict(frozen=True) + + +_MODEL_CONVERSATION_TOOL_VARIANTS: dict[str, Any] = { + "code_interpreter": CodeInterpreterTool, + "document_library": DocumentLibraryTool, + "function": FunctionTool, + "image_generation": ImageGenerationTool, + "web_search": WebSearchTool, + "web_search_premium": WebSearchPremiumTool, +} + + ModelConversationTool = Annotated[ Union[ CodeInterpreterTool, @@ -43,14 +68,20 @@ ImageGenerationTool, WebSearchTool, WebSearchPremiumTool, + UnknownModelConversationTool, ], - Field(discriminator="TYPE"), + BeforeValidator( + partial( + parse_open_union, + disc_key="type", + variants=_MODEL_CONVERSATION_TOOL_VARIANTS, + unknown_cls=UnknownModelConversationTool, + union_name="ModelConversationTool", + ) + ), ] -ModelConversationObject = Literal["conversation",] - - class ModelConversationTypedDict(TypedDict): id: str created_at: datetime @@ -68,7 +99,7 @@ class ModelConversationTypedDict(TypedDict): r"""Description of the what the 
conversation is about.""" metadata: NotRequired[Nullable[Dict[str, Any]]] r"""Custom metadata for the conversation.""" - object: NotRequired[ModelConversationObject] + object: Literal["conversation"] class ModelConversation(BaseModel): @@ -98,42 +129,51 @@ class ModelConversation(BaseModel): metadata: OptionalNullable[Dict[str, Any]] = UNSET r"""Custom metadata for the conversation.""" - object: Optional[ModelConversationObject] = "conversation" + object: Annotated[ + Annotated[ + Optional[Literal["conversation"]], + AfterValidator(validate_const("conversation")), + ], + pydantic.Field(alias="object"), + ] = "conversation" @model_serializer(mode="wrap") def serialize_model(self, handler): - optional_fields = [ - "instructions", - "tools", - "completion_args", - "name", - "description", - "metadata", - "object", - ] - nullable_fields = ["instructions", "name", "description", "metadata"] - null_default_fields = [] - + optional_fields = set( + [ + "instructions", + "tools", + "completion_args", + "name", + "description", + "metadata", + "object", + ] + ) + nullable_fields = set(["instructions", "name", "description", "metadata"]) serialized = handler(self) - m = {} for n, f in type(self).model_fields.items(): k = f.alias or n val = serialized.get(k) - serialized.pop(k, None) + is_nullable_and_explicitly_set = ( + k in nullable_fields + and (self.__pydantic_fields_set__.intersection({n})) # pylint: disable=no-member + ) + + if val != UNSET_SENTINEL: + if ( + val is not None + or k not in optional_fields + or is_nullable_and_explicitly_set + ): + m[k] = val - optional_nullable = k in optional_fields and k in nullable_fields - is_set = ( - self.__pydantic_fields_set__.intersection({n}) - or k in null_default_fields - ) # pylint: disable=no-member + return m - if val is not None and val != UNSET_SENTINEL: - m[k] = val - elif val != UNSET_SENTINEL and ( - not k in optional_fields or (optional_nullable and is_set) - ): - m[k] = val - return m +try: + 
ModelConversation.model_rebuild() +except NameError: + pass diff --git a/src/mistralai/client/models/modellist.py b/src/mistralai/client/models/modellist.py index c122122c..5fd835f2 100644 --- a/src/mistralai/client/models/modellist.py +++ b/src/mistralai/client/models/modellist.py @@ -4,9 +4,12 @@ from __future__ import annotations from .basemodelcard import BaseModelCard, BaseModelCardTypedDict from .ftmodelcard import FTModelCard, FTModelCardTypedDict -from mistralai.client.types import BaseModel -from pydantic import Field -from typing import List, Optional, Union +from functools import partial +from mistralai.client.types import BaseModel, UNSET_SENTINEL +from mistralai.client.utils.unions import parse_open_union +from pydantic import ConfigDict, model_serializer +from pydantic.functional_validators import BeforeValidator +from typing import Any, List, Literal, Optional, Union from typing_extensions import Annotated, NotRequired, TypeAliasType, TypedDict @@ -15,8 +18,33 @@ ) +class UnknownModelListData(BaseModel): + r"""A ModelListData variant the SDK doesn't recognize. 
Preserves the raw payload.""" + + type: Literal["UNKNOWN"] = "UNKNOWN" + raw: Any + is_unknown: Literal[True] = True + + model_config = ConfigDict(frozen=True) + + +_MODEL_LIST_DATA_VARIANTS: dict[str, Any] = { + "base": BaseModelCard, + "fine-tuned": FTModelCard, +} + + ModelListData = Annotated[ - Union[BaseModelCard, FTModelCard], Field(discriminator="TYPE") + Union[BaseModelCard, FTModelCard, UnknownModelListData], + BeforeValidator( + partial( + parse_open_union, + disc_key="type", + variants=_MODEL_LIST_DATA_VARIANTS, + unknown_cls=UnknownModelListData, + union_name="ModelListData", + ) + ), ] @@ -29,3 +57,19 @@ class ModelList(BaseModel): object: Optional[str] = "list" data: Optional[List[ModelListData]] = None + + @model_serializer(mode="wrap") + def serialize_model(self, handler): + optional_fields = set(["object", "data"]) + serialized = handler(self) + m = {} + + for n, f in type(self).model_fields.items(): + k = f.alias or n + val = serialized.get(k) + + if val != UNSET_SENTINEL: + if val is not None or k not in optional_fields: + m[k] = val + + return m diff --git a/src/mistralai/client/models/moderationobject.py b/src/mistralai/client/models/moderationobject.py index 9aa4eb15..e7ccd8f6 100644 --- a/src/mistralai/client/models/moderationobject.py +++ b/src/mistralai/client/models/moderationobject.py @@ -2,7 +2,8 @@ # @generated-id: 132faad0549a from __future__ import annotations -from mistralai.client.types import BaseModel +from mistralai.client.types import BaseModel, UNSET_SENTINEL +from pydantic import model_serializer from typing import Dict, Optional from typing_extensions import NotRequired, TypedDict @@ -20,3 +21,19 @@ class ModerationObject(BaseModel): category_scores: Optional[Dict[str, float]] = None r"""Moderation result""" + + @model_serializer(mode="wrap") + def serialize_model(self, handler): + optional_fields = set(["categories", "category_scores"]) + serialized = handler(self) + m = {} + + for n, f in type(self).model_fields.items(): + 
k = f.alias or n + val = serialized.get(k) + + if val != UNSET_SENTINEL: + if val is not None or k not in optional_fields: + m[k] = val + + return m diff --git a/src/mistralai/client/models/ocrimageobject.py b/src/mistralai/client/models/ocrimageobject.py index e95b67e1..365f062b 100644 --- a/src/mistralai/client/models/ocrimageobject.py +++ b/src/mistralai/client/models/ocrimageobject.py @@ -54,37 +54,34 @@ class OCRImageObject(BaseModel): @model_serializer(mode="wrap") def serialize_model(self, handler): - optional_fields = ["image_base64", "image_annotation"] - nullable_fields = [ - "top_left_x", - "top_left_y", - "bottom_right_x", - "bottom_right_y", - "image_base64", - "image_annotation", - ] - null_default_fields = [] - + optional_fields = set(["image_base64", "image_annotation"]) + nullable_fields = set( + [ + "top_left_x", + "top_left_y", + "bottom_right_x", + "bottom_right_y", + "image_base64", + "image_annotation", + ] + ) serialized = handler(self) - m = {} for n, f in type(self).model_fields.items(): k = f.alias or n val = serialized.get(k) - serialized.pop(k, None) - - optional_nullable = k in optional_fields and k in nullable_fields - is_set = ( - self.__pydantic_fields_set__.intersection({n}) - or k in null_default_fields - ) # pylint: disable=no-member - - if val is not None and val != UNSET_SENTINEL: - m[k] = val - elif val != UNSET_SENTINEL and ( - not k in optional_fields or (optional_nullable and is_set) - ): - m[k] = val + is_nullable_and_explicitly_set = ( + k in nullable_fields + and (self.__pydantic_fields_set__.intersection({n})) # pylint: disable=no-member + ) + + if val != UNSET_SENTINEL: + if ( + val is not None + or k not in optional_fields + or is_nullable_and_explicitly_set + ): + m[k] = val return m diff --git a/src/mistralai/client/models/ocrpageobject.py b/src/mistralai/client/models/ocrpageobject.py index 4f4ccf43..ffc7b3b6 100644 --- a/src/mistralai/client/models/ocrpageobject.py +++ b/src/mistralai/client/models/ocrpageobject.py 
@@ -63,30 +63,25 @@ class OCRPageObject(BaseModel): @model_serializer(mode="wrap") def serialize_model(self, handler): - optional_fields = ["tables", "hyperlinks", "header", "footer"] - nullable_fields = ["header", "footer", "dimensions"] - null_default_fields = [] - + optional_fields = set(["tables", "hyperlinks", "header", "footer"]) + nullable_fields = set(["header", "footer", "dimensions"]) serialized = handler(self) - m = {} for n, f in type(self).model_fields.items(): k = f.alias or n val = serialized.get(k) - serialized.pop(k, None) - - optional_nullable = k in optional_fields and k in nullable_fields - is_set = ( - self.__pydantic_fields_set__.intersection({n}) - or k in null_default_fields - ) # pylint: disable=no-member - - if val is not None and val != UNSET_SENTINEL: - m[k] = val - elif val != UNSET_SENTINEL and ( - not k in optional_fields or (optional_nullable and is_set) - ): - m[k] = val + is_nullable_and_explicitly_set = ( + k in nullable_fields + and (self.__pydantic_fields_set__.intersection({n})) # pylint: disable=no-member + ) + + if val != UNSET_SENTINEL: + if ( + val is not None + or k not in optional_fields + or is_nullable_and_explicitly_set + ): + m[k] = val return m diff --git a/src/mistralai/client/models/ocrrequest.py b/src/mistralai/client/models/ocrrequest.py index 18b899dd..4ad337ce 100644 --- a/src/mistralai/client/models/ocrrequest.py +++ b/src/mistralai/client/models/ocrrequest.py @@ -18,14 +18,16 @@ from typing_extensions import NotRequired, TypeAliasType, TypedDict -DocumentTypedDict = TypeAliasType( - "DocumentTypedDict", +DocumentUnionTypedDict = TypeAliasType( + "DocumentUnionTypedDict", Union[FileChunkTypedDict, ImageURLChunkTypedDict, DocumentURLChunkTypedDict], ) r"""Document to run OCR on""" -Document = TypeAliasType("Document", Union[FileChunk, ImageURLChunk, DocumentURLChunk]) +DocumentUnion = TypeAliasType( + "DocumentUnion", Union[FileChunk, ImageURLChunk, DocumentURLChunk] +) r"""Document to run OCR on""" @@ -37,7 
+39,7 @@ class OCRRequestTypedDict(TypedDict): model: Nullable[str] - document: DocumentTypedDict + document: DocumentUnionTypedDict r"""Document to run OCR on""" id: NotRequired[str] pages: NotRequired[Nullable[List[int]]] @@ -62,7 +64,7 @@ class OCRRequestTypedDict(TypedDict): class OCRRequest(BaseModel): model: Nullable[str] - document: Document + document: DocumentUnion r"""Document to run OCR on""" id: Optional[str] = None @@ -96,52 +98,51 @@ class OCRRequest(BaseModel): @model_serializer(mode="wrap") def serialize_model(self, handler): - optional_fields = [ - "id", - "pages", - "include_image_base64", - "image_limit", - "image_min_size", - "bbox_annotation_format", - "document_annotation_format", - "document_annotation_prompt", - "table_format", - "extract_header", - "extract_footer", - ] - nullable_fields = [ - "model", - "pages", - "include_image_base64", - "image_limit", - "image_min_size", - "bbox_annotation_format", - "document_annotation_format", - "document_annotation_prompt", - "table_format", - ] - null_default_fields = [] - + optional_fields = set( + [ + "id", + "pages", + "include_image_base64", + "image_limit", + "image_min_size", + "bbox_annotation_format", + "document_annotation_format", + "document_annotation_prompt", + "table_format", + "extract_header", + "extract_footer", + ] + ) + nullable_fields = set( + [ + "model", + "pages", + "include_image_base64", + "image_limit", + "image_min_size", + "bbox_annotation_format", + "document_annotation_format", + "document_annotation_prompt", + "table_format", + ] + ) serialized = handler(self) - m = {} for n, f in type(self).model_fields.items(): k = f.alias or n val = serialized.get(k) - serialized.pop(k, None) - - optional_nullable = k in optional_fields and k in nullable_fields - is_set = ( - self.__pydantic_fields_set__.intersection({n}) - or k in null_default_fields - ) # pylint: disable=no-member - - if val is not None and val != UNSET_SENTINEL: - m[k] = val - elif val != UNSET_SENTINEL and ( - 
not k in optional_fields or (optional_nullable and is_set) - ): - m[k] = val + is_nullable_and_explicitly_set = ( + k in nullable_fields + and (self.__pydantic_fields_set__.intersection({n})) # pylint: disable=no-member + ) + + if val != UNSET_SENTINEL: + if ( + val is not None + or k not in optional_fields + or is_nullable_and_explicitly_set + ): + m[k] = val return m diff --git a/src/mistralai/client/models/ocrresponse.py b/src/mistralai/client/models/ocrresponse.py index 0a36e975..e63eed98 100644 --- a/src/mistralai/client/models/ocrresponse.py +++ b/src/mistralai/client/models/ocrresponse.py @@ -40,30 +40,25 @@ class OCRResponse(BaseModel): @model_serializer(mode="wrap") def serialize_model(self, handler): - optional_fields = ["document_annotation"] - nullable_fields = ["document_annotation"] - null_default_fields = [] - + optional_fields = set(["document_annotation"]) + nullable_fields = set(["document_annotation"]) serialized = handler(self) - m = {} for n, f in type(self).model_fields.items(): k = f.alias or n val = serialized.get(k) - serialized.pop(k, None) - - optional_nullable = k in optional_fields and k in nullable_fields - is_set = ( - self.__pydantic_fields_set__.intersection({n}) - or k in null_default_fields - ) # pylint: disable=no-member + is_nullable_and_explicitly_set = ( + k in nullable_fields + and (self.__pydantic_fields_set__.intersection({n})) # pylint: disable=no-member + ) - if val is not None and val != UNSET_SENTINEL: - m[k] = val - elif val != UNSET_SENTINEL and ( - not k in optional_fields or (optional_nullable and is_set) - ): - m[k] = val + if val != UNSET_SENTINEL: + if ( + val is not None + or k not in optional_fields + or is_nullable_and_explicitly_set + ): + m[k] = val return m diff --git a/src/mistralai/client/models/ocrtableobject.py b/src/mistralai/client/models/ocrtableobject.py index e32ad894..66bb050f 100644 --- a/src/mistralai/client/models/ocrtableobject.py +++ b/src/mistralai/client/models/ocrtableobject.py @@ -36,3 
+36,9 @@ class OCRTableObject(BaseModel): format_: Annotated[Format, pydantic.Field(alias="format")] r"""Format of the table""" + + +try: + OCRTableObject.model_rebuild() +except NameError: + pass diff --git a/src/mistralai/client/models/ocrusageinfo.py b/src/mistralai/client/models/ocrusageinfo.py index a421d850..2ec1322b 100644 --- a/src/mistralai/client/models/ocrusageinfo.py +++ b/src/mistralai/client/models/ocrusageinfo.py @@ -29,30 +29,25 @@ class OCRUsageInfo(BaseModel): @model_serializer(mode="wrap") def serialize_model(self, handler): - optional_fields = ["doc_size_bytes"] - nullable_fields = ["doc_size_bytes"] - null_default_fields = [] - + optional_fields = set(["doc_size_bytes"]) + nullable_fields = set(["doc_size_bytes"]) serialized = handler(self) - m = {} for n, f in type(self).model_fields.items(): k = f.alias or n val = serialized.get(k) - serialized.pop(k, None) - - optional_nullable = k in optional_fields and k in nullable_fields - is_set = ( - self.__pydantic_fields_set__.intersection({n}) - or k in null_default_fields - ) # pylint: disable=no-member - - if val is not None and val != UNSET_SENTINEL: - m[k] = val - elif val != UNSET_SENTINEL and ( - not k in optional_fields or (optional_nullable and is_set) - ): - m[k] = val + is_nullable_and_explicitly_set = ( + k in nullable_fields + and (self.__pydantic_fields_set__.intersection({n})) # pylint: disable=no-member + ) + + if val != UNSET_SENTINEL: + if ( + val is not None + or k not in optional_fields + or is_nullable_and_explicitly_set + ): + m[k] = val return m diff --git a/src/mistralai/client/models/outputcontentchunks.py b/src/mistralai/client/models/outputcontentchunks.py index 1a115fe8..fab7907b 100644 --- a/src/mistralai/client/models/outputcontentchunks.py +++ b/src/mistralai/client/models/outputcontentchunks.py @@ -2,10 +2,13 @@ # @generated-id: 9ad9741f4975 from __future__ import annotations +from .conversationthinkchunk import ( + ConversationThinkChunk, + 
ConversationThinkChunkTypedDict, +) from .documenturlchunk import DocumentURLChunk, DocumentURLChunkTypedDict from .imageurlchunk import ImageURLChunk, ImageURLChunkTypedDict from .textchunk import TextChunk, TextChunkTypedDict -from .thinkchunk import ThinkChunk, ThinkChunkTypedDict from .toolfilechunk import ToolFileChunk, ToolFileChunkTypedDict from .toolreferencechunk import ToolReferenceChunk, ToolReferenceChunkTypedDict from typing import Union @@ -18,7 +21,7 @@ TextChunkTypedDict, ImageURLChunkTypedDict, DocumentURLChunkTypedDict, - ThinkChunkTypedDict, + ConversationThinkChunkTypedDict, ToolFileChunkTypedDict, ToolReferenceChunkTypedDict, ], @@ -31,7 +34,7 @@ TextChunk, ImageURLChunk, DocumentURLChunk, - ThinkChunk, + ConversationThinkChunk, ToolFileChunk, ToolReferenceChunk, ], diff --git a/src/mistralai/client/models/prediction.py b/src/mistralai/client/models/prediction.py index 52f4adf1..0c6f4182 100644 --- a/src/mistralai/client/models/prediction.py +++ b/src/mistralai/client/models/prediction.py @@ -2,9 +2,10 @@ # @generated-id: 1cc842a069a5 from __future__ import annotations -from mistralai.client.types import BaseModel +from mistralai.client.types import BaseModel, UNSET_SENTINEL from mistralai.client.utils import validate_const import pydantic +from pydantic import model_serializer from pydantic.functional_validators import AfterValidator from typing import Literal, Optional from typing_extensions import Annotated, NotRequired, TypedDict @@ -20,7 +21,7 @@ class PredictionTypedDict(TypedDict): class Prediction(BaseModel): r"""Enable users to specify an expected completion, optimizing response times by leveraging known or predictable content.""" - TYPE: Annotated[ + type: Annotated[ Annotated[ Optional[Literal["content"]], AfterValidator(validate_const("content")) ], @@ -28,3 +29,25 @@ class Prediction(BaseModel): ] = "content" content: Optional[str] = "" + + @model_serializer(mode="wrap") + def serialize_model(self, handler): + optional_fields = 
set(["type", "content"]) + serialized = handler(self) + m = {} + + for n, f in type(self).model_fields.items(): + k = f.alias or n + val = serialized.get(k) + + if val != UNSET_SENTINEL: + if val is not None or k not in optional_fields: + m[k] = val + + return m + + +try: + Prediction.model_rebuild() +except NameError: + pass diff --git a/src/mistralai/client/models/realtimetranscriptionerror.py b/src/mistralai/client/models/realtimetranscriptionerror.py index f8f2d3da..c661e461 100644 --- a/src/mistralai/client/models/realtimetranscriptionerror.py +++ b/src/mistralai/client/models/realtimetranscriptionerror.py @@ -6,9 +6,10 @@ RealtimeTranscriptionErrorDetail, RealtimeTranscriptionErrorDetailTypedDict, ) -from mistralai.client.types import BaseModel +from mistralai.client.types import BaseModel, UNSET_SENTINEL from mistralai.client.utils import validate_const import pydantic +from pydantic import model_serializer from pydantic.functional_validators import AfterValidator from typing import Literal, Optional from typing_extensions import Annotated, TypedDict @@ -22,7 +23,29 @@ class RealtimeTranscriptionErrorTypedDict(TypedDict): class RealtimeTranscriptionError(BaseModel): error: RealtimeTranscriptionErrorDetail - TYPE: Annotated[ + type: Annotated[ Annotated[Optional[Literal["error"]], AfterValidator(validate_const("error"))], pydantic.Field(alias="type"), ] = "error" + + @model_serializer(mode="wrap") + def serialize_model(self, handler): + optional_fields = set(["type"]) + serialized = handler(self) + m = {} + + for n, f in type(self).model_fields.items(): + k = f.alias or n + val = serialized.get(k) + + if val != UNSET_SENTINEL: + if val is not None or k not in optional_fields: + m[k] = val + + return m + + +try: + RealtimeTranscriptionError.model_rebuild() +except NameError: + pass diff --git a/src/mistralai/client/models/realtimetranscriptioninputaudioappend.py b/src/mistralai/client/models/realtimetranscriptioninputaudioappend.py new file mode 100644 index 
00000000..8156a270 --- /dev/null +++ b/src/mistralai/client/models/realtimetranscriptioninputaudioappend.py @@ -0,0 +1,52 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" +# @generated-id: 8b03cde6e115 + +from __future__ import annotations +from mistralai.client.types import BaseModel, UNSET_SENTINEL +from mistralai.client.utils import validate_const +import pydantic +from pydantic import model_serializer +from pydantic.functional_validators import AfterValidator +from typing import Literal, Optional +from typing_extensions import Annotated, TypedDict + + +class RealtimeTranscriptionInputAudioAppendTypedDict(TypedDict): + audio: str + r"""Base64-encoded raw PCM bytes matching the current audio_format. Max decoded size: 262144 bytes.""" + type: Literal["input_audio.append"] + + +class RealtimeTranscriptionInputAudioAppend(BaseModel): + audio: str + r"""Base64-encoded raw PCM bytes matching the current audio_format. Max decoded size: 262144 bytes.""" + + type: Annotated[ + Annotated[ + Optional[Literal["input_audio.append"]], + AfterValidator(validate_const("input_audio.append")), + ], + pydantic.Field(alias="type"), + ] = "input_audio.append" + + @model_serializer(mode="wrap") + def serialize_model(self, handler): + optional_fields = set(["type"]) + serialized = handler(self) + m = {} + + for n, f in type(self).model_fields.items(): + k = f.alias or n + val = serialized.get(k) + + if val != UNSET_SENTINEL: + if val is not None or k not in optional_fields: + m[k] = val + + return m + + +try: + RealtimeTranscriptionInputAudioAppend.model_rebuild() +except NameError: + pass diff --git a/src/mistralai/client/models/realtimetranscriptioninputaudioend.py b/src/mistralai/client/models/realtimetranscriptioninputaudioend.py new file mode 100644 index 00000000..473eedb7 --- /dev/null +++ b/src/mistralai/client/models/realtimetranscriptioninputaudioend.py @@ -0,0 +1,47 @@ +"""Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" +# @generated-id: c187ba1b551d + +from __future__ import annotations +from mistralai.client.types import BaseModel, UNSET_SENTINEL +from mistralai.client.utils import validate_const +import pydantic +from pydantic import model_serializer +from pydantic.functional_validators import AfterValidator +from typing import Literal, Optional +from typing_extensions import Annotated, TypedDict + + +class RealtimeTranscriptionInputAudioEndTypedDict(TypedDict): + type: Literal["input_audio.end"] + + +class RealtimeTranscriptionInputAudioEnd(BaseModel): + type: Annotated[ + Annotated[ + Optional[Literal["input_audio.end"]], + AfterValidator(validate_const("input_audio.end")), + ], + pydantic.Field(alias="type"), + ] = "input_audio.end" + + @model_serializer(mode="wrap") + def serialize_model(self, handler): + optional_fields = set(["type"]) + serialized = handler(self) + m = {} + + for n, f in type(self).model_fields.items(): + k = f.alias or n + val = serialized.get(k) + + if val != UNSET_SENTINEL: + if val is not None or k not in optional_fields: + m[k] = val + + return m + + +try: + RealtimeTranscriptionInputAudioEnd.model_rebuild() +except NameError: + pass diff --git a/src/mistralai/client/models/realtimetranscriptioninputaudioflush.py b/src/mistralai/client/models/realtimetranscriptioninputaudioflush.py new file mode 100644 index 00000000..553d14c7 --- /dev/null +++ b/src/mistralai/client/models/realtimetranscriptioninputaudioflush.py @@ -0,0 +1,47 @@ +"""Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" +# @generated-id: b27b600c310e + +from __future__ import annotations +from mistralai.client.types import BaseModel, UNSET_SENTINEL +from mistralai.client.utils import validate_const +import pydantic +from pydantic import model_serializer +from pydantic.functional_validators import AfterValidator +from typing import Literal, Optional +from typing_extensions import Annotated, TypedDict + + +class RealtimeTranscriptionInputAudioFlushTypedDict(TypedDict): + type: Literal["input_audio.flush"] + + +class RealtimeTranscriptionInputAudioFlush(BaseModel): + type: Annotated[ + Annotated[ + Optional[Literal["input_audio.flush"]], + AfterValidator(validate_const("input_audio.flush")), + ], + pydantic.Field(alias="type"), + ] = "input_audio.flush" + + @model_serializer(mode="wrap") + def serialize_model(self, handler): + optional_fields = set(["type"]) + serialized = handler(self) + m = {} + + for n, f in type(self).model_fields.items(): + k = f.alias or n + val = serialized.get(k) + + if val != UNSET_SENTINEL: + if val is not None or k not in optional_fields: + m[k] = val + + return m + + +try: + RealtimeTranscriptionInputAudioFlush.model_rebuild() +except NameError: + pass diff --git a/src/mistralai/client/models/realtimetranscriptionsession.py b/src/mistralai/client/models/realtimetranscriptionsession.py index d20d0d8c..a74a457b 100644 --- a/src/mistralai/client/models/realtimetranscriptionsession.py +++ b/src/mistralai/client/models/realtimetranscriptionsession.py @@ -3,14 +3,22 @@ from __future__ import annotations from .audioformat import AudioFormat, AudioFormatTypedDict -from mistralai.client.types import BaseModel -from typing_extensions import TypedDict +from mistralai.client.types import ( + BaseModel, + Nullable, + OptionalNullable, + UNSET, + UNSET_SENTINEL, +) +from pydantic import model_serializer +from typing_extensions import NotRequired, TypedDict class RealtimeTranscriptionSessionTypedDict(TypedDict): request_id: str model: str audio_format: 
AudioFormatTypedDict + target_streaming_delay_ms: NotRequired[Nullable[int]] class RealtimeTranscriptionSession(BaseModel): @@ -19,3 +27,30 @@ class RealtimeTranscriptionSession(BaseModel): model: str audio_format: AudioFormat + + target_streaming_delay_ms: OptionalNullable[int] = UNSET + + @model_serializer(mode="wrap") + def serialize_model(self, handler): + optional_fields = set(["target_streaming_delay_ms"]) + nullable_fields = set(["target_streaming_delay_ms"]) + serialized = handler(self) + m = {} + + for n, f in type(self).model_fields.items(): + k = f.alias or n + val = serialized.get(k) + is_nullable_and_explicitly_set = ( + k in nullable_fields + and (self.__pydantic_fields_set__.intersection({n})) # pylint: disable=no-member + ) + + if val != UNSET_SENTINEL: + if ( + val is not None + or k not in optional_fields + or is_nullable_and_explicitly_set + ): + m[k] = val + + return m diff --git a/src/mistralai/client/models/realtimetranscriptionsessioncreated.py b/src/mistralai/client/models/realtimetranscriptionsessioncreated.py index c4fa5774..bb96875a 100644 --- a/src/mistralai/client/models/realtimetranscriptionsessioncreated.py +++ b/src/mistralai/client/models/realtimetranscriptionsessioncreated.py @@ -6,9 +6,10 @@ RealtimeTranscriptionSession, RealtimeTranscriptionSessionTypedDict, ) -from mistralai.client.types import BaseModel +from mistralai.client.types import BaseModel, UNSET_SENTINEL from mistralai.client.utils import validate_const import pydantic +from pydantic import model_serializer from pydantic.functional_validators import AfterValidator from typing import Literal, Optional from typing_extensions import Annotated, TypedDict @@ -22,10 +23,32 @@ class RealtimeTranscriptionSessionCreatedTypedDict(TypedDict): class RealtimeTranscriptionSessionCreated(BaseModel): session: RealtimeTranscriptionSession - TYPE: Annotated[ + type: Annotated[ Annotated[ Optional[Literal["session.created"]], AfterValidator(validate_const("session.created")), ], 
pydantic.Field(alias="type"), ] = "session.created" + + @model_serializer(mode="wrap") + def serialize_model(self, handler): + optional_fields = set(["type"]) + serialized = handler(self) + m = {} + + for n, f in type(self).model_fields.items(): + k = f.alias or n + val = serialized.get(k) + + if val != UNSET_SENTINEL: + if val is not None or k not in optional_fields: + m[k] = val + + return m + + +try: + RealtimeTranscriptionSessionCreated.model_rebuild() +except NameError: + pass diff --git a/src/mistralai/client/models/realtimetranscriptionsessionupdated.py b/src/mistralai/client/models/realtimetranscriptionsessionupdated.py index a61fb05e..fea5db4a 100644 --- a/src/mistralai/client/models/realtimetranscriptionsessionupdated.py +++ b/src/mistralai/client/models/realtimetranscriptionsessionupdated.py @@ -6,9 +6,10 @@ RealtimeTranscriptionSession, RealtimeTranscriptionSessionTypedDict, ) -from mistralai.client.types import BaseModel +from mistralai.client.types import BaseModel, UNSET_SENTINEL from mistralai.client.utils import validate_const import pydantic +from pydantic import model_serializer from pydantic.functional_validators import AfterValidator from typing import Literal, Optional from typing_extensions import Annotated, TypedDict @@ -22,10 +23,32 @@ class RealtimeTranscriptionSessionUpdatedTypedDict(TypedDict): class RealtimeTranscriptionSessionUpdated(BaseModel): session: RealtimeTranscriptionSession - TYPE: Annotated[ + type: Annotated[ Annotated[ Optional[Literal["session.updated"]], AfterValidator(validate_const("session.updated")), ], pydantic.Field(alias="type"), ] = "session.updated" + + @model_serializer(mode="wrap") + def serialize_model(self, handler): + optional_fields = set(["type"]) + serialized = handler(self) + m = {} + + for n, f in type(self).model_fields.items(): + k = f.alias or n + val = serialized.get(k) + + if val != UNSET_SENTINEL: + if val is not None or k not in optional_fields: + m[k] = val + + return m + + +try: + 
RealtimeTranscriptionSessionUpdated.model_rebuild() +except NameError: + pass diff --git a/src/mistralai/client/models/realtimetranscriptionsessionupdatemessage.py b/src/mistralai/client/models/realtimetranscriptionsessionupdatemessage.py new file mode 100644 index 00000000..07ad59a4 --- /dev/null +++ b/src/mistralai/client/models/realtimetranscriptionsessionupdatemessage.py @@ -0,0 +1,54 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" +# @generated-id: 4e1b3fd7c5a3 + +from __future__ import annotations +from .realtimetranscriptionsessionupdatepayload import ( + RealtimeTranscriptionSessionUpdatePayload, + RealtimeTranscriptionSessionUpdatePayloadTypedDict, +) +from mistralai.client.types import BaseModel, UNSET_SENTINEL +from mistralai.client.utils import validate_const +import pydantic +from pydantic import model_serializer +from pydantic.functional_validators import AfterValidator +from typing import Literal, Optional +from typing_extensions import Annotated, TypedDict + + +class RealtimeTranscriptionSessionUpdateMessageTypedDict(TypedDict): + session: RealtimeTranscriptionSessionUpdatePayloadTypedDict + type: Literal["session.update"] + + +class RealtimeTranscriptionSessionUpdateMessage(BaseModel): + session: RealtimeTranscriptionSessionUpdatePayload + + type: Annotated[ + Annotated[ + Optional[Literal["session.update"]], + AfterValidator(validate_const("session.update")), + ], + pydantic.Field(alias="type"), + ] = "session.update" + + @model_serializer(mode="wrap") + def serialize_model(self, handler): + optional_fields = set(["type"]) + serialized = handler(self) + m = {} + + for n, f in type(self).model_fields.items(): + k = f.alias or n + val = serialized.get(k) + + if val != UNSET_SENTINEL: + if val is not None or k not in optional_fields: + m[k] = val + + return m + + +try: + RealtimeTranscriptionSessionUpdateMessage.model_rebuild() +except NameError: + pass diff --git 
a/src/mistralai/client/models/realtimetranscriptionsessionupdatepayload.py b/src/mistralai/client/models/realtimetranscriptionsessionupdatepayload.py new file mode 100644 index 00000000..a89441e9 --- /dev/null +++ b/src/mistralai/client/models/realtimetranscriptionsessionupdatepayload.py @@ -0,0 +1,54 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" +# @generated-id: 7033fdb33ad4 + +from __future__ import annotations +from .audioformat import AudioFormat, AudioFormatTypedDict +from mistralai.client.types import ( + BaseModel, + Nullable, + OptionalNullable, + UNSET, + UNSET_SENTINEL, +) +from pydantic import model_serializer +from typing_extensions import NotRequired, TypedDict + + +class RealtimeTranscriptionSessionUpdatePayloadTypedDict(TypedDict): + audio_format: NotRequired[Nullable[AudioFormatTypedDict]] + r"""Set before sending audio. Audio format updates are rejected after audio starts.""" + target_streaming_delay_ms: NotRequired[Nullable[int]] + r"""Set before sending audio. Streaming delay updates are rejected after audio starts.""" + + +class RealtimeTranscriptionSessionUpdatePayload(BaseModel): + audio_format: OptionalNullable[AudioFormat] = UNSET + r"""Set before sending audio. Audio format updates are rejected after audio starts.""" + + target_streaming_delay_ms: OptionalNullable[int] = UNSET + r"""Set before sending audio. 
Streaming delay updates are rejected after audio starts.""" + + @model_serializer(mode="wrap") + def serialize_model(self, handler): + optional_fields = set(["audio_format", "target_streaming_delay_ms"]) + nullable_fields = set(["audio_format", "target_streaming_delay_ms"]) + serialized = handler(self) + m = {} + + for n, f in type(self).model_fields.items(): + k = f.alias or n + val = serialized.get(k) + is_nullable_and_explicitly_set = ( + k in nullable_fields + and (self.__pydantic_fields_set__.intersection({n})) # pylint: disable=no-member + ) + + if val != UNSET_SENTINEL: + if ( + val is not None + or k not in optional_fields + or is_nullable_and_explicitly_set + ): + m[k] = val + + return m diff --git a/src/mistralai/client/models/referencechunk.py b/src/mistralai/client/models/referencechunk.py index 7634d8ae..e0bbae4e 100644 --- a/src/mistralai/client/models/referencechunk.py +++ b/src/mistralai/client/models/referencechunk.py @@ -2,20 +2,48 @@ # @generated-id: 921acd3a224a from __future__ import annotations -from mistralai.client.types import BaseModel +from mistralai.client.types import BaseModel, UNSET_SENTINEL +from mistralai.client.utils import validate_const +import pydantic +from pydantic import model_serializer +from pydantic.functional_validators import AfterValidator from typing import List, Literal, Optional -from typing_extensions import NotRequired, TypedDict - - -ReferenceChunkType = Literal["reference",] +from typing_extensions import Annotated, TypedDict class ReferenceChunkTypedDict(TypedDict): reference_ids: List[int] - type: NotRequired[ReferenceChunkType] + type: Literal["reference"] class ReferenceChunk(BaseModel): reference_ids: List[int] - type: Optional[ReferenceChunkType] = "reference" + type: Annotated[ + Annotated[ + Optional[Literal["reference"]], AfterValidator(validate_const("reference")) + ], + pydantic.Field(alias="type"), + ] = "reference" + + @model_serializer(mode="wrap") + def serialize_model(self, handler): + 
optional_fields = set(["type"]) + serialized = handler(self) + m = {} + + for n, f in type(self).model_fields.items(): + k = f.alias or n + val = serialized.get(k) + + if val != UNSET_SENTINEL: + if val is not None or k not in optional_fields: + m[k] = val + + return m + + +try: + ReferenceChunk.model_rebuild() +except NameError: + pass diff --git a/src/mistralai/client/models/reprocessdocumentop.py b/src/mistralai/client/models/reprocessdocumentop.py deleted file mode 100644 index 48a4b72b..00000000 --- a/src/mistralai/client/models/reprocessdocumentop.py +++ /dev/null @@ -1,22 +0,0 @@ -"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" -# @generated-id: b2913a7aa5c9 - -from __future__ import annotations -from mistralai.client.types import BaseModel -from mistralai.client.utils import FieldMetadata, PathParamMetadata -from typing_extensions import Annotated, TypedDict - - -class ReprocessDocumentRequestTypedDict(TypedDict): - library_id: str - document_id: str - - -class ReprocessDocumentRequest(BaseModel): - library_id: Annotated[ - str, FieldMetadata(path=PathParamMetadata(style="simple", explode=False)) - ] - - document_id: Annotated[ - str, FieldMetadata(path=PathParamMetadata(style="simple", explode=False)) - ] diff --git a/src/mistralai/client/models/responsedoneevent.py b/src/mistralai/client/models/responsedoneevent.py index ed331ff1..be38fba8 100644 --- a/src/mistralai/client/models/responsedoneevent.py +++ b/src/mistralai/client/models/responsedoneevent.py @@ -4,9 +4,10 @@ from __future__ import annotations from .conversationusageinfo import ConversationUsageInfo, ConversationUsageInfoTypedDict from datetime import datetime -from mistralai.client.types import BaseModel +from mistralai.client.types import BaseModel, UNSET_SENTINEL from mistralai.client.utils import validate_const import pydantic +from pydantic import model_serializer from pydantic.functional_validators import AfterValidator from typing import Literal, Optional from 
typing_extensions import Annotated, NotRequired, TypedDict @@ -21,7 +22,7 @@ class ResponseDoneEventTypedDict(TypedDict): class ResponseDoneEvent(BaseModel): usage: ConversationUsageInfo - TYPE: Annotated[ + type: Annotated[ Annotated[ Literal["conversation.response.done"], AfterValidator(validate_const("conversation.response.done")), @@ -30,3 +31,25 @@ class ResponseDoneEvent(BaseModel): ] = "conversation.response.done" created_at: Optional[datetime] = None + + @model_serializer(mode="wrap") + def serialize_model(self, handler): + optional_fields = set(["created_at"]) + serialized = handler(self) + m = {} + + for n, f in type(self).model_fields.items(): + k = f.alias or n + val = serialized.get(k) + + if val != UNSET_SENTINEL: + if val is not None or k not in optional_fields: + m[k] = val + + return m + + +try: + ResponseDoneEvent.model_rebuild() +except NameError: + pass diff --git a/src/mistralai/client/models/responseerrorevent.py b/src/mistralai/client/models/responseerrorevent.py index 8f196a52..fa4d0d01 100644 --- a/src/mistralai/client/models/responseerrorevent.py +++ b/src/mistralai/client/models/responseerrorevent.py @@ -3,9 +3,10 @@ from __future__ import annotations from datetime import datetime -from mistralai.client.types import BaseModel +from mistralai.client.types import BaseModel, UNSET_SENTINEL from mistralai.client.utils import validate_const import pydantic +from pydantic import model_serializer from pydantic.functional_validators import AfterValidator from typing import Literal, Optional from typing_extensions import Annotated, NotRequired, TypedDict @@ -23,7 +24,7 @@ class ResponseErrorEvent(BaseModel): code: int - TYPE: Annotated[ + type: Annotated[ Annotated[ Literal["conversation.response.error"], AfterValidator(validate_const("conversation.response.error")), @@ -32,3 +33,25 @@ class ResponseErrorEvent(BaseModel): ] = "conversation.response.error" created_at: Optional[datetime] = None + + @model_serializer(mode="wrap") + def 
serialize_model(self, handler): + optional_fields = set(["created_at"]) + serialized = handler(self) + m = {} + + for n, f in type(self).model_fields.items(): + k = f.alias or n + val = serialized.get(k) + + if val != UNSET_SENTINEL: + if val is not None or k not in optional_fields: + m[k] = val + + return m + + +try: + ResponseErrorEvent.model_rebuild() +except NameError: + pass diff --git a/src/mistralai/client/models/responseformat.py b/src/mistralai/client/models/responseformat.py index 409b80d6..b2971412 100644 --- a/src/mistralai/client/models/responseformat.py +++ b/src/mistralai/client/models/responseformat.py @@ -32,30 +32,25 @@ class ResponseFormat(BaseModel): @model_serializer(mode="wrap") def serialize_model(self, handler): - optional_fields = ["type", "json_schema"] - nullable_fields = ["json_schema"] - null_default_fields = [] - + optional_fields = set(["type", "json_schema"]) + nullable_fields = set(["json_schema"]) serialized = handler(self) - m = {} for n, f in type(self).model_fields.items(): k = f.alias or n val = serialized.get(k) - serialized.pop(k, None) - - optional_nullable = k in optional_fields and k in nullable_fields - is_set = ( - self.__pydantic_fields_set__.intersection({n}) - or k in null_default_fields - ) # pylint: disable=no-member - - if val is not None and val != UNSET_SENTINEL: - m[k] = val - elif val != UNSET_SENTINEL and ( - not k in optional_fields or (optional_nullable and is_set) - ): - m[k] = val + is_nullable_and_explicitly_set = ( + k in nullable_fields + and (self.__pydantic_fields_set__.intersection({n})) # pylint: disable=no-member + ) + + if val != UNSET_SENTINEL: + if ( + val is not None + or k not in optional_fields + or is_nullable_and_explicitly_set + ): + m[k] = val return m diff --git a/src/mistralai/client/models/responsestartedevent.py b/src/mistralai/client/models/responsestartedevent.py index 256d2a6c..84abfcd9 100644 --- a/src/mistralai/client/models/responsestartedevent.py +++ 
b/src/mistralai/client/models/responsestartedevent.py @@ -3,9 +3,10 @@ from __future__ import annotations from datetime import datetime -from mistralai.client.types import BaseModel +from mistralai.client.types import BaseModel, UNSET_SENTINEL from mistralai.client.utils import validate_const import pydantic +from pydantic import model_serializer from pydantic.functional_validators import AfterValidator from typing import Literal, Optional from typing_extensions import Annotated, NotRequired, TypedDict @@ -20,7 +21,7 @@ class ResponseStartedEventTypedDict(TypedDict): class ResponseStartedEvent(BaseModel): conversation_id: str - TYPE: Annotated[ + type: Annotated[ Annotated[ Literal["conversation.response.started"], AfterValidator(validate_const("conversation.response.started")), @@ -29,3 +30,25 @@ class ResponseStartedEvent(BaseModel): ] = "conversation.response.started" created_at: Optional[datetime] = None + + @model_serializer(mode="wrap") + def serialize_model(self, handler): + optional_fields = set(["created_at"]) + serialized = handler(self) + m = {} + + for n, f in type(self).model_fields.items(): + k = f.alias or n + val = serialized.get(k) + + if val != UNSET_SENTINEL: + if val is not None or k not in optional_fields: + m[k] = val + + return m + + +try: + ResponseStartedEvent.model_rebuild() +except NameError: + pass diff --git a/src/mistralai/client/models/retrieve_model_v1_models_model_id_getop.py b/src/mistralai/client/models/retrieve_model_v1_models_model_id_getop.py new file mode 100644 index 00000000..cd5955c1 --- /dev/null +++ b/src/mistralai/client/models/retrieve_model_v1_models_model_id_getop.py @@ -0,0 +1,64 @@ +"""Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" +# @generated-id: 6fefa90ca351 + +from __future__ import annotations +from .basemodelcard import BaseModelCard, BaseModelCardTypedDict +from .ftmodelcard import FTModelCard, FTModelCardTypedDict +from functools import partial +from mistralai.client.types import BaseModel +from mistralai.client.utils import FieldMetadata, PathParamMetadata +from mistralai.client.utils.unions import parse_open_union +from pydantic import ConfigDict +from pydantic.functional_validators import BeforeValidator +from typing import Any, Literal, Union +from typing_extensions import Annotated, TypeAliasType, TypedDict + + +class RetrieveModelV1ModelsModelIDGetRequestTypedDict(TypedDict): + model_id: str + r"""The ID of the model to retrieve.""" + + +class RetrieveModelV1ModelsModelIDGetRequest(BaseModel): + model_id: Annotated[ + str, FieldMetadata(path=PathParamMetadata(style="simple", explode=False)) + ] + r"""The ID of the model to retrieve.""" + + +ResponseRetrieveModelV1ModelsModelIDGetTypedDict = TypeAliasType( + "ResponseRetrieveModelV1ModelsModelIDGetTypedDict", + Union[BaseModelCardTypedDict, FTModelCardTypedDict], +) +r"""Successful Response""" + + +class UnknownResponseRetrieveModelV1ModelsModelIDGet(BaseModel): + r"""A ResponseRetrieveModelV1ModelsModelIDGet variant the SDK doesn't recognize. 
Preserves the raw payload.""" + + type: Literal["UNKNOWN"] = "UNKNOWN" + raw: Any + is_unknown: Literal[True] = True + + model_config = ConfigDict(frozen=True) + + +_RESPONSE_RETRIEVE_MODEL_V1_MODELS_MODEL_ID_GET_VARIANTS: dict[str, Any] = { + "base": BaseModelCard, + "fine-tuned": FTModelCard, +} + + +ResponseRetrieveModelV1ModelsModelIDGet = Annotated[ + Union[BaseModelCard, FTModelCard, UnknownResponseRetrieveModelV1ModelsModelIDGet], + BeforeValidator( + partial( + parse_open_union, + disc_key="type", + variants=_RESPONSE_RETRIEVE_MODEL_V1_MODELS_MODEL_ID_GET_VARIANTS, + unknown_cls=UnknownResponseRetrieveModelV1ModelsModelIDGet, + union_name="ResponseRetrieveModelV1ModelsModelIDGet", + ) + ), +] +r"""Successful Response""" diff --git a/src/mistralai/client/models/retrievemodelop.py b/src/mistralai/client/models/retrievemodelop.py deleted file mode 100644 index b4334e9a..00000000 --- a/src/mistralai/client/models/retrievemodelop.py +++ /dev/null @@ -1,36 +0,0 @@ -"""Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" -# @generated-id: d883baa79c9e - -from __future__ import annotations -from .basemodelcard import BaseModelCard, BaseModelCardTypedDict -from .ftmodelcard import FTModelCard, FTModelCardTypedDict -from mistralai.client.types import BaseModel -from mistralai.client.utils import FieldMetadata, PathParamMetadata -from pydantic import Field -from typing import Union -from typing_extensions import Annotated, TypeAliasType, TypedDict - - -class RetrieveModelRequestTypedDict(TypedDict): - model_id: str - r"""The ID of the model to retrieve.""" - - -class RetrieveModelRequest(BaseModel): - model_id: Annotated[ - str, FieldMetadata(path=PathParamMetadata(style="simple", explode=False)) - ] - r"""The ID of the model to retrieve.""" - - -ResponseRetrieveModelV1ModelsModelIDGetTypedDict = TypeAliasType( - "ResponseRetrieveModelV1ModelsModelIDGetTypedDict", - Union[BaseModelCardTypedDict, FTModelCardTypedDict], -) -r"""Successful Response""" - - -ResponseRetrieveModelV1ModelsModelIDGet = Annotated[ - Union[BaseModelCard, FTModelCard], Field(discriminator="TYPE") -] -r"""Successful Response""" diff --git a/src/mistralai/client/models/security.py b/src/mistralai/client/models/security.py index 4fa8b4b2..f3b3423e 100644 --- a/src/mistralai/client/models/security.py +++ b/src/mistralai/client/models/security.py @@ -2,8 +2,9 @@ # @generated-id: c2ca0e2a36b7 from __future__ import annotations -from mistralai.client.types import BaseModel +from mistralai.client.types import BaseModel, UNSET_SENTINEL from mistralai.client.utils import FieldMetadata, SecurityMetadata +from pydantic import model_serializer from typing import Optional from typing_extensions import Annotated, NotRequired, TypedDict @@ -24,3 +25,19 @@ class Security(BaseModel): ) ), ] = None + + @model_serializer(mode="wrap") + def serialize_model(self, handler): + optional_fields = set(["ApiKey"]) + serialized = handler(self) + m = {} + + for n, f in type(self).model_fields.items(): + k = f.alias or n + val 
= serialized.get(k) + + if val != UNSET_SENTINEL: + if val is not None or k not in optional_fields: + m[k] = val + + return m diff --git a/src/mistralai/client/models/sharingdelete.py b/src/mistralai/client/models/sharingdelete.py index 202732cf..33ccd7e7 100644 --- a/src/mistralai/client/models/sharingdelete.py +++ b/src/mistralai/client/models/sharingdelete.py @@ -33,30 +33,25 @@ class SharingDelete(BaseModel): @model_serializer(mode="wrap") def serialize_model(self, handler): - optional_fields = ["org_id"] - nullable_fields = ["org_id"] - null_default_fields = [] - + optional_fields = set(["org_id"]) + nullable_fields = set(["org_id"]) serialized = handler(self) - m = {} for n, f in type(self).model_fields.items(): k = f.alias or n val = serialized.get(k) - serialized.pop(k, None) - - optional_nullable = k in optional_fields and k in nullable_fields - is_set = ( - self.__pydantic_fields_set__.intersection({n}) - or k in null_default_fields - ) # pylint: disable=no-member - - if val is not None and val != UNSET_SENTINEL: - m[k] = val - elif val != UNSET_SENTINEL and ( - not k in optional_fields or (optional_nullable and is_set) - ): - m[k] = val + is_nullable_and_explicitly_set = ( + k in nullable_fields + and (self.__pydantic_fields_set__.intersection({n})) # pylint: disable=no-member + ) + + if val != UNSET_SENTINEL: + if ( + val is not None + or k not in optional_fields + or is_nullable_and_explicitly_set + ): + m[k] = val return m diff --git a/src/mistralai/client/models/sharingin.py b/src/mistralai/client/models/sharingin.py index 8cc3e896..7c1a52b0 100644 --- a/src/mistralai/client/models/sharingin.py +++ b/src/mistralai/client/models/sharingin.py @@ -37,30 +37,25 @@ class SharingIn(BaseModel): @model_serializer(mode="wrap") def serialize_model(self, handler): - optional_fields = ["org_id"] - nullable_fields = ["org_id"] - null_default_fields = [] - + optional_fields = set(["org_id"]) + nullable_fields = set(["org_id"]) serialized = handler(self) - m = {} 
for n, f in type(self).model_fields.items(): k = f.alias or n val = serialized.get(k) - serialized.pop(k, None) - - optional_nullable = k in optional_fields and k in nullable_fields - is_set = ( - self.__pydantic_fields_set__.intersection({n}) - or k in null_default_fields - ) # pylint: disable=no-member - - if val is not None and val != UNSET_SENTINEL: - m[k] = val - elif val != UNSET_SENTINEL and ( - not k in optional_fields or (optional_nullable and is_set) - ): - m[k] = val + is_nullable_and_explicitly_set = ( + k in nullable_fields + and (self.__pydantic_fields_set__.intersection({n})) # pylint: disable=no-member + ) + + if val != UNSET_SENTINEL: + if ( + val is not None + or k not in optional_fields + or is_nullable_and_explicitly_set + ): + m[k] = val return m diff --git a/src/mistralai/client/models/sharingout.py b/src/mistralai/client/models/sharingout.py index 77807154..ab3679a4 100644 --- a/src/mistralai/client/models/sharingout.py +++ b/src/mistralai/client/models/sharingout.py @@ -37,30 +37,25 @@ class SharingOut(BaseModel): @model_serializer(mode="wrap") def serialize_model(self, handler): - optional_fields = ["user_id"] - nullable_fields = ["user_id", "share_with_uuid"] - null_default_fields = [] - + optional_fields = set(["user_id"]) + nullable_fields = set(["user_id", "share_with_uuid"]) serialized = handler(self) - m = {} for n, f in type(self).model_fields.items(): k = f.alias or n val = serialized.get(k) - serialized.pop(k, None) - - optional_nullable = k in optional_fields and k in nullable_fields - is_set = ( - self.__pydantic_fields_set__.intersection({n}) - or k in null_default_fields - ) # pylint: disable=no-member - - if val is not None and val != UNSET_SENTINEL: - m[k] = val - elif val != UNSET_SENTINEL and ( - not k in optional_fields or (optional_nullable and is_set) - ): - m[k] = val + is_nullable_and_explicitly_set = ( + k in nullable_fields + and (self.__pydantic_fields_set__.intersection({n})) # pylint: disable=no-member + ) + + if 
val != UNSET_SENTINEL: + if ( + val is not None + or k not in optional_fields + or is_nullable_and_explicitly_set + ): + m[k] = val return m diff --git a/src/mistralai/client/models/startfinetuningjobop.py b/src/mistralai/client/models/startfinetuningjobop.py deleted file mode 100644 index 805a8721..00000000 --- a/src/mistralai/client/models/startfinetuningjobop.py +++ /dev/null @@ -1,41 +0,0 @@ -"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" -# @generated-id: 663886392468 - -from __future__ import annotations -from .classifierdetailedjobout import ( - ClassifierDetailedJobOut, - ClassifierDetailedJobOutTypedDict, -) -from .completiondetailedjobout import ( - CompletionDetailedJobOut, - CompletionDetailedJobOutTypedDict, -) -from mistralai.client.types import BaseModel -from mistralai.client.utils import FieldMetadata, PathParamMetadata -from pydantic import Field -from typing import Union -from typing_extensions import Annotated, TypeAliasType, TypedDict - - -class StartFineTuningJobRequestTypedDict(TypedDict): - job_id: str - - -class StartFineTuningJobRequest(BaseModel): - job_id: Annotated[ - str, FieldMetadata(path=PathParamMetadata(style="simple", explode=False)) - ] - - -StartFineTuningJobResponseTypedDict = TypeAliasType( - "StartFineTuningJobResponseTypedDict", - Union[CompletionDetailedJobOutTypedDict, ClassifierDetailedJobOutTypedDict], -) -r"""OK""" - - -StartFineTuningJobResponse = Annotated[ - Union[ClassifierDetailedJobOut, CompletionDetailedJobOut], - Field(discriminator="JOB_TYPE"), -] -r"""OK""" diff --git a/src/mistralai/client/models/systemmessage.py b/src/mistralai/client/models/systemmessage.py index 352eca76..2602cd2d 100644 --- a/src/mistralai/client/models/systemmessage.py +++ b/src/mistralai/client/models/systemmessage.py @@ -33,7 +33,13 @@ class SystemMessageTypedDict(TypedDict): class SystemMessage(BaseModel): content: SystemMessageContent - ROLE: Annotated[ + role: Annotated[ Annotated[Literal["system"], 
AfterValidator(validate_const("system"))], pydantic.Field(alias="role"), ] = "system" + + +try: + SystemMessage.model_rebuild() +except NameError: + pass diff --git a/src/mistralai/client/models/textchunk.py b/src/mistralai/client/models/textchunk.py index c0584234..ac9f3137 100644 --- a/src/mistralai/client/models/textchunk.py +++ b/src/mistralai/client/models/textchunk.py @@ -2,20 +2,46 @@ # @generated-id: 9c96fb86a9ab from __future__ import annotations -from mistralai.client.types import BaseModel +from mistralai.client.types import BaseModel, UNSET_SENTINEL +from mistralai.client.utils import validate_const +import pydantic +from pydantic import model_serializer +from pydantic.functional_validators import AfterValidator from typing import Literal, Optional -from typing_extensions import NotRequired, TypedDict - - -TextChunkType = Literal["text",] +from typing_extensions import Annotated, TypedDict class TextChunkTypedDict(TypedDict): text: str - type: NotRequired[TextChunkType] + type: Literal["text"] class TextChunk(BaseModel): text: str - type: Optional[TextChunkType] = "text" + type: Annotated[ + Annotated[Optional[Literal["text"]], AfterValidator(validate_const("text"))], + pydantic.Field(alias="type"), + ] = "text" + + @model_serializer(mode="wrap") + def serialize_model(self, handler): + optional_fields = set(["type"]) + serialized = handler(self) + m = {} + + for n, f in type(self).model_fields.items(): + k = f.alias or n + val = serialized.get(k) + + if val != UNSET_SENTINEL: + if val is not None or k not in optional_fields: + m[k] = val + + return m + + +try: + TextChunk.model_rebuild() +except NameError: + pass diff --git a/src/mistralai/client/models/thinkchunk.py b/src/mistralai/client/models/thinkchunk.py index a999f5d7..5995e601 100644 --- a/src/mistralai/client/models/thinkchunk.py +++ b/src/mistralai/client/models/thinkchunk.py @@ -4,33 +4,61 @@ from __future__ import annotations from .referencechunk import ReferenceChunk, 
ReferenceChunkTypedDict from .textchunk import TextChunk, TextChunkTypedDict -from mistralai.client.types import BaseModel +from mistralai.client.types import BaseModel, UNSET_SENTINEL +from mistralai.client.utils import validate_const +import pydantic +from pydantic import model_serializer +from pydantic.functional_validators import AfterValidator from typing import List, Literal, Optional, Union -from typing_extensions import NotRequired, TypeAliasType, TypedDict +from typing_extensions import Annotated, NotRequired, TypeAliasType, TypedDict -ThinkingTypedDict = TypeAliasType( - "ThinkingTypedDict", Union[ReferenceChunkTypedDict, TextChunkTypedDict] +ThinkChunkThinkingTypedDict = TypeAliasType( + "ThinkChunkThinkingTypedDict", Union[ReferenceChunkTypedDict, TextChunkTypedDict] ) -Thinking = TypeAliasType("Thinking", Union[ReferenceChunk, TextChunk]) - - -ThinkChunkType = Literal["thinking",] +ThinkChunkThinking = TypeAliasType( + "ThinkChunkThinking", Union[ReferenceChunk, TextChunk] +) class ThinkChunkTypedDict(TypedDict): - thinking: List[ThinkingTypedDict] + thinking: List[ThinkChunkThinkingTypedDict] + type: Literal["thinking"] closed: NotRequired[bool] r"""Whether the thinking chunk is closed or not. Currently only used for prefixing.""" - type: NotRequired[ThinkChunkType] class ThinkChunk(BaseModel): - thinking: List[Thinking] + thinking: List[ThinkChunkThinking] + + type: Annotated[ + Annotated[Literal["thinking"], AfterValidator(validate_const("thinking"))], + pydantic.Field(alias="type"), + ] = "thinking" closed: Optional[bool] = None r"""Whether the thinking chunk is closed or not. 
Currently only used for prefixing.""" - type: Optional[ThinkChunkType] = "thinking" + @model_serializer(mode="wrap") + def serialize_model(self, handler): + optional_fields = set(["closed"]) + serialized = handler(self) + m = {} + + for n, f in type(self).model_fields.items(): + k = f.alias or n + val = serialized.get(k) + + if val != UNSET_SENTINEL: + if val is not None or k not in optional_fields: + m[k] = val + + return m + + +try: + ThinkChunk.model_rebuild() +except NameError: + pass diff --git a/src/mistralai/client/models/tool.py b/src/mistralai/client/models/tool.py index a46d31f1..2b9965e5 100644 --- a/src/mistralai/client/models/tool.py +++ b/src/mistralai/client/models/tool.py @@ -4,7 +4,8 @@ from __future__ import annotations from .function import Function, FunctionTypedDict from .tooltypes import ToolTypes -from mistralai.client.types import BaseModel +from mistralai.client.types import BaseModel, UNSET_SENTINEL +from pydantic import model_serializer from typing import Optional from typing_extensions import NotRequired, TypedDict @@ -18,3 +19,19 @@ class Tool(BaseModel): function: Function type: Optional[ToolTypes] = None + + @model_serializer(mode="wrap") + def serialize_model(self, handler): + optional_fields = set(["type"]) + serialized = handler(self) + m = {} + + for n, f in type(self).model_fields.items(): + k = f.alias or n + val = serialized.get(k) + + if val != UNSET_SENTINEL: + if val is not None or k not in optional_fields: + m[k] = val + + return m diff --git a/src/mistralai/client/models/toolcall.py b/src/mistralai/client/models/toolcall.py index 4a05bbd0..181cec33 100644 --- a/src/mistralai/client/models/toolcall.py +++ b/src/mistralai/client/models/toolcall.py @@ -4,7 +4,8 @@ from __future__ import annotations from .functioncall import FunctionCall, FunctionCallTypedDict from .tooltypes import ToolTypes -from mistralai.client.types import BaseModel +from mistralai.client.types import BaseModel, UNSET_SENTINEL +from pydantic import 
model_serializer from typing import Optional from typing_extensions import NotRequired, TypedDict @@ -24,3 +25,19 @@ class ToolCall(BaseModel): type: Optional[ToolTypes] = None index: Optional[int] = 0 + + @model_serializer(mode="wrap") + def serialize_model(self, handler): + optional_fields = set(["id", "type", "index"]) + serialized = handler(self) + m = {} + + for n, f in type(self).model_fields.items(): + k = f.alias or n + val = serialized.get(k) + + if val != UNSET_SENTINEL: + if val is not None or k not in optional_fields: + m[k] = val + + return m diff --git a/src/mistralai/client/models/toolcallconfirmation.py b/src/mistralai/client/models/toolcallconfirmation.py new file mode 100644 index 00000000..fd6eca50 --- /dev/null +++ b/src/mistralai/client/models/toolcallconfirmation.py @@ -0,0 +1,24 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" +# @generated-id: f2e953cfb4fe + +from __future__ import annotations +from mistralai.client.types import BaseModel +from typing import Literal +from typing_extensions import TypedDict + + +Confirmation = Literal[ + "allow", + "deny", +] + + +class ToolCallConfirmationTypedDict(TypedDict): + tool_call_id: str + confirmation: Confirmation + + +class ToolCallConfirmation(BaseModel): + tool_call_id: str + + confirmation: Confirmation diff --git a/src/mistralai/client/models/toolchoice.py b/src/mistralai/client/models/toolchoice.py index aa2016fb..cb787df1 100644 --- a/src/mistralai/client/models/toolchoice.py +++ b/src/mistralai/client/models/toolchoice.py @@ -4,7 +4,8 @@ from __future__ import annotations from .functionname import FunctionName, FunctionNameTypedDict from .tooltypes import ToolTypes -from mistralai.client.types import BaseModel +from mistralai.client.types import BaseModel, UNSET_SENTINEL +from pydantic import model_serializer from typing import Optional from typing_extensions import NotRequired, TypedDict @@ -24,3 +25,19 @@ class ToolChoice(BaseModel): r"""this restriction of 
`Function` is used to select a specific function to call""" type: Optional[ToolTypes] = None + + @model_serializer(mode="wrap") + def serialize_model(self, handler): + optional_fields = set(["type"]) + serialized = handler(self) + m = {} + + for n, f in type(self).model_fields.items(): + k = f.alias or n + val = serialized.get(k) + + if val != UNSET_SENTINEL: + if val is not None or k not in optional_fields: + m[k] = val + + return m diff --git a/src/mistralai/client/models/toolconfiguration.py b/src/mistralai/client/models/toolconfiguration.py new file mode 100644 index 00000000..b903c8b6 --- /dev/null +++ b/src/mistralai/client/models/toolconfiguration.py @@ -0,0 +1,53 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" +# @generated-id: faec24b75066 + +from __future__ import annotations +from mistralai.client.types import ( + BaseModel, + Nullable, + OptionalNullable, + UNSET, + UNSET_SENTINEL, +) +from pydantic import model_serializer +from typing import List +from typing_extensions import NotRequired, TypedDict + + +class ToolConfigurationTypedDict(TypedDict): + exclude: NotRequired[Nullable[List[str]]] + include: NotRequired[Nullable[List[str]]] + requires_confirmation: NotRequired[Nullable[List[str]]] + + +class ToolConfiguration(BaseModel): + exclude: OptionalNullable[List[str]] = UNSET + + include: OptionalNullable[List[str]] = UNSET + + requires_confirmation: OptionalNullable[List[str]] = UNSET + + @model_serializer(mode="wrap") + def serialize_model(self, handler): + optional_fields = set(["exclude", "include", "requires_confirmation"]) + nullable_fields = set(["exclude", "include", "requires_confirmation"]) + serialized = handler(self) + m = {} + + for n, f in type(self).model_fields.items(): + k = f.alias or n + val = serialized.get(k) + is_nullable_and_explicitly_set = ( + k in nullable_fields + and (self.__pydantic_fields_set__.intersection({n})) # pylint: disable=no-member + ) + + if val != UNSET_SENTINEL: + if ( + val is not 
None + or k not in optional_fields + or is_nullable_and_explicitly_set + ): + m[k] = val + + return m diff --git a/src/mistralai/client/models/toolexecutiondeltaevent.py b/src/mistralai/client/models/toolexecutiondeltaevent.py index 384ec240..5a977ca6 100644 --- a/src/mistralai/client/models/toolexecutiondeltaevent.py +++ b/src/mistralai/client/models/toolexecutiondeltaevent.py @@ -4,9 +4,10 @@ from __future__ import annotations from .builtinconnectors import BuiltInConnectors from datetime import datetime -from mistralai.client.types import BaseModel +from mistralai.client.types import BaseModel, UNSET_SENTINEL from mistralai.client.utils import validate_const import pydantic +from pydantic import model_serializer from pydantic.functional_validators import AfterValidator from typing import Literal, Optional, Union from typing_extensions import Annotated, NotRequired, TypeAliasType, TypedDict @@ -38,7 +39,7 @@ class ToolExecutionDeltaEvent(BaseModel): arguments: str - TYPE: Annotated[ + type: Annotated[ Annotated[ Literal["tool.execution.delta"], AfterValidator(validate_const("tool.execution.delta")), @@ -49,3 +50,25 @@ class ToolExecutionDeltaEvent(BaseModel): created_at: Optional[datetime] = None output_index: Optional[int] = 0 + + @model_serializer(mode="wrap") + def serialize_model(self, handler): + optional_fields = set(["created_at", "output_index"]) + serialized = handler(self) + m = {} + + for n, f in type(self).model_fields.items(): + k = f.alias or n + val = serialized.get(k) + + if val != UNSET_SENTINEL: + if val is not None or k not in optional_fields: + m[k] = val + + return m + + +try: + ToolExecutionDeltaEvent.model_rebuild() +except NameError: + pass diff --git a/src/mistralai/client/models/toolexecutiondoneevent.py b/src/mistralai/client/models/toolexecutiondoneevent.py index 56f28899..1c9b0ec9 100644 --- a/src/mistralai/client/models/toolexecutiondoneevent.py +++ b/src/mistralai/client/models/toolexecutiondoneevent.py @@ -4,9 +4,10 @@ from 
__future__ import annotations from .builtinconnectors import BuiltInConnectors from datetime import datetime -from mistralai.client.types import BaseModel +from mistralai.client.types import BaseModel, UNSET_SENTINEL from mistralai.client.utils import validate_const import pydantic +from pydantic import model_serializer from pydantic.functional_validators import AfterValidator from typing import Any, Dict, Literal, Optional, Union from typing_extensions import Annotated, NotRequired, TypeAliasType, TypedDict @@ -36,7 +37,7 @@ class ToolExecutionDoneEvent(BaseModel): name: ToolExecutionDoneEventName - TYPE: Annotated[ + type: Annotated[ Annotated[ Literal["tool.execution.done"], AfterValidator(validate_const("tool.execution.done")), @@ -49,3 +50,25 @@ class ToolExecutionDoneEvent(BaseModel): output_index: Optional[int] = 0 info: Optional[Dict[str, Any]] = None + + @model_serializer(mode="wrap") + def serialize_model(self, handler): + optional_fields = set(["created_at", "output_index", "info"]) + serialized = handler(self) + m = {} + + for n, f in type(self).model_fields.items(): + k = f.alias or n + val = serialized.get(k) + + if val != UNSET_SENTINEL: + if val is not None or k not in optional_fields: + m[k] = val + + return m + + +try: + ToolExecutionDoneEvent.model_rebuild() +except NameError: + pass diff --git a/src/mistralai/client/models/toolexecutionentry.py b/src/mistralai/client/models/toolexecutionentry.py index 158cbf06..0d6f2a13 100644 --- a/src/mistralai/client/models/toolexecutionentry.py +++ b/src/mistralai/client/models/toolexecutionentry.py @@ -11,15 +11,12 @@ UNSET, UNSET_SENTINEL, ) +from mistralai.client.utils import validate_const +import pydantic from pydantic import model_serializer +from pydantic.functional_validators import AfterValidator from typing import Any, Dict, Literal, Optional, Union -from typing_extensions import NotRequired, TypeAliasType, TypedDict - - -ToolExecutionEntryObject = Literal["entry",] - - -ToolExecutionEntryType = 
Literal["tool.execution",] +from typing_extensions import Annotated, NotRequired, TypeAliasType, TypedDict ToolExecutionEntryNameTypedDict = TypeAliasType( @@ -35,10 +32,12 @@ class ToolExecutionEntryTypedDict(TypedDict): name: ToolExecutionEntryNameTypedDict arguments: str - object: NotRequired[ToolExecutionEntryObject] - type: NotRequired[ToolExecutionEntryType] + object: Literal["entry"] + type: Literal["tool.execution"] created_at: NotRequired[datetime] completed_at: NotRequired[Nullable[datetime]] + agent_id: NotRequired[Nullable[str]] + model: NotRequired[Nullable[str]] id: NotRequired[str] info: NotRequired[Dict[str, Any]] @@ -48,44 +47,69 @@ class ToolExecutionEntry(BaseModel): arguments: str - object: Optional[ToolExecutionEntryObject] = "entry" + object: Annotated[ + Annotated[Optional[Literal["entry"]], AfterValidator(validate_const("entry"))], + pydantic.Field(alias="object"), + ] = "entry" - type: Optional[ToolExecutionEntryType] = "tool.execution" + type: Annotated[ + Annotated[ + Optional[Literal["tool.execution"]], + AfterValidator(validate_const("tool.execution")), + ], + pydantic.Field(alias="type"), + ] = "tool.execution" created_at: Optional[datetime] = None completed_at: OptionalNullable[datetime] = UNSET + agent_id: OptionalNullable[str] = UNSET + + model: OptionalNullable[str] = UNSET + id: Optional[str] = None info: Optional[Dict[str, Any]] = None @model_serializer(mode="wrap") def serialize_model(self, handler): - optional_fields = ["object", "type", "created_at", "completed_at", "id", "info"] - nullable_fields = ["completed_at"] - null_default_fields = [] - + optional_fields = set( + [ + "object", + "type", + "created_at", + "completed_at", + "agent_id", + "model", + "id", + "info", + ] + ) + nullable_fields = set(["completed_at", "agent_id", "model"]) serialized = handler(self) - m = {} for n, f in type(self).model_fields.items(): k = f.alias or n val = serialized.get(k) - serialized.pop(k, None) + is_nullable_and_explicitly_set = ( + k 
in nullable_fields + and (self.__pydantic_fields_set__.intersection({n})) # pylint: disable=no-member + ) + + if val != UNSET_SENTINEL: + if ( + val is not None + or k not in optional_fields + or is_nullable_and_explicitly_set + ): + m[k] = val - optional_nullable = k in optional_fields and k in nullable_fields - is_set = ( - self.__pydantic_fields_set__.intersection({n}) - or k in null_default_fields - ) # pylint: disable=no-member + return m - if val is not None and val != UNSET_SENTINEL: - m[k] = val - elif val != UNSET_SENTINEL and ( - not k in optional_fields or (optional_nullable and is_set) - ): - m[k] = val - return m +try: + ToolExecutionEntry.model_rebuild() +except NameError: + pass diff --git a/src/mistralai/client/models/toolexecutionstartedevent.py b/src/mistralai/client/models/toolexecutionstartedevent.py index 15918669..21e5bfa8 100644 --- a/src/mistralai/client/models/toolexecutionstartedevent.py +++ b/src/mistralai/client/models/toolexecutionstartedevent.py @@ -4,9 +4,16 @@ from __future__ import annotations from .builtinconnectors import BuiltInConnectors from datetime import datetime -from mistralai.client.types import BaseModel +from mistralai.client.types import ( + BaseModel, + Nullable, + OptionalNullable, + UNSET, + UNSET_SENTINEL, +) from mistralai.client.utils import validate_const import pydantic +from pydantic import model_serializer from pydantic.functional_validators import AfterValidator from typing import Literal, Optional, Union from typing_extensions import Annotated, NotRequired, TypeAliasType, TypedDict @@ -29,6 +36,8 @@ class ToolExecutionStartedEventTypedDict(TypedDict): type: Literal["tool.execution.started"] created_at: NotRequired[datetime] output_index: NotRequired[int] + model: NotRequired[Nullable[str]] + agent_id: NotRequired[Nullable[str]] class ToolExecutionStartedEvent(BaseModel): @@ -38,7 +47,7 @@ class ToolExecutionStartedEvent(BaseModel): arguments: str - TYPE: Annotated[ + type: Annotated[ Annotated[ 
Literal["tool.execution.started"], AfterValidator(validate_const("tool.execution.started")), @@ -49,3 +58,38 @@ class ToolExecutionStartedEvent(BaseModel): created_at: Optional[datetime] = None output_index: Optional[int] = 0 + + model: OptionalNullable[str] = UNSET + + agent_id: OptionalNullable[str] = UNSET + + @model_serializer(mode="wrap") + def serialize_model(self, handler): + optional_fields = set(["created_at", "output_index", "model", "agent_id"]) + nullable_fields = set(["model", "agent_id"]) + serialized = handler(self) + m = {} + + for n, f in type(self).model_fields.items(): + k = f.alias or n + val = serialized.get(k) + is_nullable_and_explicitly_set = ( + k in nullable_fields + and (self.__pydantic_fields_set__.intersection({n})) # pylint: disable=no-member + ) + + if val != UNSET_SENTINEL: + if ( + val is not None + or k not in optional_fields + or is_nullable_and_explicitly_set + ): + m[k] = val + + return m + + +try: + ToolExecutionStartedEvent.model_rebuild() +except NameError: + pass diff --git a/src/mistralai/client/models/toolfilechunk.py b/src/mistralai/client/models/toolfilechunk.py index 6eebd562..0708b3ff 100644 --- a/src/mistralai/client/models/toolfilechunk.py +++ b/src/mistralai/client/models/toolfilechunk.py @@ -10,12 +10,12 @@ UNSET, UNSET_SENTINEL, ) +from mistralai.client.utils import validate_const +import pydantic from pydantic import model_serializer +from pydantic.functional_validators import AfterValidator from typing import Literal, Optional, Union -from typing_extensions import NotRequired, TypeAliasType, TypedDict - - -ToolFileChunkType = Literal["tool_file",] +from typing_extensions import Annotated, NotRequired, TypeAliasType, TypedDict ToolFileChunkToolTypedDict = TypeAliasType( @@ -29,7 +29,7 @@ class ToolFileChunkTypedDict(TypedDict): tool: ToolFileChunkToolTypedDict file_id: str - type: NotRequired[ToolFileChunkType] + type: Literal["tool_file"] file_name: NotRequired[Nullable[str]] file_type: 
NotRequired[Nullable[str]] @@ -39,7 +39,12 @@ class ToolFileChunk(BaseModel): file_id: str - type: Optional[ToolFileChunkType] = "tool_file" + type: Annotated[ + Annotated[ + Optional[Literal["tool_file"]], AfterValidator(validate_const("tool_file")) + ], + pydantic.Field(alias="type"), + ] = "tool_file" file_name: OptionalNullable[str] = UNSET @@ -47,30 +52,31 @@ class ToolFileChunk(BaseModel): @model_serializer(mode="wrap") def serialize_model(self, handler): - optional_fields = ["type", "file_name", "file_type"] - nullable_fields = ["file_name", "file_type"] - null_default_fields = [] - + optional_fields = set(["type", "file_name", "file_type"]) + nullable_fields = set(["file_name", "file_type"]) serialized = handler(self) - m = {} for n, f in type(self).model_fields.items(): k = f.alias or n val = serialized.get(k) - serialized.pop(k, None) + is_nullable_and_explicitly_set = ( + k in nullable_fields + and (self.__pydantic_fields_set__.intersection({n})) # pylint: disable=no-member + ) + + if val != UNSET_SENTINEL: + if ( + val is not None + or k not in optional_fields + or is_nullable_and_explicitly_set + ): + m[k] = val - optional_nullable = k in optional_fields and k in nullable_fields - is_set = ( - self.__pydantic_fields_set__.intersection({n}) - or k in null_default_fields - ) # pylint: disable=no-member + return m - if val is not None and val != UNSET_SENTINEL: - m[k] = val - elif val != UNSET_SENTINEL and ( - not k in optional_fields or (optional_nullable and is_set) - ): - m[k] = val - return m +try: + ToolFileChunk.model_rebuild() +except NameError: + pass diff --git a/src/mistralai/client/models/toolmessage.py b/src/mistralai/client/models/toolmessage.py index b3e8ffd9..05a0ee63 100644 --- a/src/mistralai/client/models/toolmessage.py +++ b/src/mistralai/client/models/toolmessage.py @@ -28,49 +28,50 @@ class ToolMessageTypedDict(TypedDict): content: Nullable[ToolMessageContentTypedDict] + role: Literal["tool"] tool_call_id: NotRequired[Nullable[str]] 
name: NotRequired[Nullable[str]] - role: Literal["tool"] class ToolMessage(BaseModel): content: Nullable[ToolMessageContent] - tool_call_id: OptionalNullable[str] = UNSET - - name: OptionalNullable[str] = UNSET - - ROLE: Annotated[ + role: Annotated[ Annotated[Literal["tool"], AfterValidator(validate_const("tool"))], pydantic.Field(alias="role"), ] = "tool" + tool_call_id: OptionalNullable[str] = UNSET + + name: OptionalNullable[str] = UNSET + @model_serializer(mode="wrap") def serialize_model(self, handler): - optional_fields = ["tool_call_id", "name"] - nullable_fields = ["content", "tool_call_id", "name"] - null_default_fields = [] - + optional_fields = set(["tool_call_id", "name"]) + nullable_fields = set(["content", "tool_call_id", "name"]) serialized = handler(self) - m = {} for n, f in type(self).model_fields.items(): k = f.alias or n val = serialized.get(k) - serialized.pop(k, None) + is_nullable_and_explicitly_set = ( + k in nullable_fields + and (self.__pydantic_fields_set__.intersection({n})) # pylint: disable=no-member + ) + + if val != UNSET_SENTINEL: + if ( + val is not None + or k not in optional_fields + or is_nullable_and_explicitly_set + ): + m[k] = val - optional_nullable = k in optional_fields and k in nullable_fields - is_set = ( - self.__pydantic_fields_set__.intersection({n}) - or k in null_default_fields - ) # pylint: disable=no-member + return m - if val is not None and val != UNSET_SENTINEL: - m[k] = val - elif val != UNSET_SENTINEL and ( - not k in optional_fields or (optional_nullable and is_set) - ): - m[k] = val - return m +try: + ToolMessage.model_rebuild() +except NameError: + pass diff --git a/src/mistralai/client/models/toolreferencechunk.py b/src/mistralai/client/models/toolreferencechunk.py index 3c76c8c2..95454fe8 100644 --- a/src/mistralai/client/models/toolreferencechunk.py +++ b/src/mistralai/client/models/toolreferencechunk.py @@ -10,12 +10,12 @@ UNSET, UNSET_SENTINEL, ) +from mistralai.client.utils import validate_const 
+import pydantic from pydantic import model_serializer +from pydantic.functional_validators import AfterValidator from typing import Literal, Optional, Union -from typing_extensions import NotRequired, TypeAliasType, TypedDict - - -ToolReferenceChunkType = Literal["tool_reference",] +from typing_extensions import Annotated, NotRequired, TypeAliasType, TypedDict ToolReferenceChunkToolTypedDict = TypeAliasType( @@ -31,7 +31,7 @@ class ToolReferenceChunkTypedDict(TypedDict): tool: ToolReferenceChunkToolTypedDict title: str - type: NotRequired[ToolReferenceChunkType] + type: Literal["tool_reference"] url: NotRequired[Nullable[str]] favicon: NotRequired[Nullable[str]] description: NotRequired[Nullable[str]] @@ -42,7 +42,13 @@ class ToolReferenceChunk(BaseModel): title: str - type: Optional[ToolReferenceChunkType] = "tool_reference" + type: Annotated[ + Annotated[ + Optional[Literal["tool_reference"]], + AfterValidator(validate_const("tool_reference")), + ], + pydantic.Field(alias="type"), + ] = "tool_reference" url: OptionalNullable[str] = UNSET @@ -52,30 +58,31 @@ class ToolReferenceChunk(BaseModel): @model_serializer(mode="wrap") def serialize_model(self, handler): - optional_fields = ["type", "url", "favicon", "description"] - nullable_fields = ["url", "favicon", "description"] - null_default_fields = [] - + optional_fields = set(["type", "url", "favicon", "description"]) + nullable_fields = set(["url", "favicon", "description"]) serialized = handler(self) - m = {} for n, f in type(self).model_fields.items(): k = f.alias or n val = serialized.get(k) - serialized.pop(k, None) + is_nullable_and_explicitly_set = ( + k in nullable_fields + and (self.__pydantic_fields_set__.intersection({n})) # pylint: disable=no-member + ) + + if val != UNSET_SENTINEL: + if ( + val is not None + or k not in optional_fields + or is_nullable_and_explicitly_set + ): + m[k] = val - optional_nullable = k in optional_fields and k in nullable_fields - is_set = ( - 
self.__pydantic_fields_set__.intersection({n}) - or k in null_default_fields - ) # pylint: disable=no-member + return m - if val is not None and val != UNSET_SENTINEL: - m[k] = val - elif val != UNSET_SENTINEL and ( - not k in optional_fields or (optional_nullable and is_set) - ): - m[k] = val - return m +try: + ToolReferenceChunk.model_rebuild() +except NameError: + pass diff --git a/src/mistralai/client/models/trainingfile.py b/src/mistralai/client/models/trainingfile.py index 1f710ff8..2faeda8b 100644 --- a/src/mistralai/client/models/trainingfile.py +++ b/src/mistralai/client/models/trainingfile.py @@ -2,7 +2,8 @@ # @generated-id: 2edf9bce227d from __future__ import annotations -from mistralai.client.types import BaseModel +from mistralai.client.types import BaseModel, UNSET_SENTINEL +from pydantic import model_serializer from typing import Optional from typing_extensions import NotRequired, TypedDict @@ -16,3 +17,19 @@ class TrainingFile(BaseModel): file_id: str weight: Optional[float] = 1 + + @model_serializer(mode="wrap") + def serialize_model(self, handler): + optional_fields = set(["weight"]) + serialized = handler(self) + m = {} + + for n, f in type(self).model_fields.items(): + k = f.alias or n + val = serialized.get(k) + + if val != UNSET_SENTINEL: + if val is not None or k not in optional_fields: + m[k] = val + + return m diff --git a/src/mistralai/client/models/transcriptionresponse.py b/src/mistralai/client/models/transcriptionresponse.py index 786863ec..70315463 100644 --- a/src/mistralai/client/models/transcriptionresponse.py +++ b/src/mistralai/client/models/transcriptionresponse.py @@ -48,32 +48,27 @@ def additional_properties(self, value): @model_serializer(mode="wrap") def serialize_model(self, handler): - optional_fields = ["segments"] - nullable_fields = ["language"] - null_default_fields = [] - + optional_fields = set(["segments"]) + nullable_fields = set(["language"]) serialized = handler(self) - m = {} for n, f in 
type(self).model_fields.items(): k = f.alias or n val = serialized.get(k) serialized.pop(k, None) - - optional_nullable = k in optional_fields and k in nullable_fields - is_set = ( - self.__pydantic_fields_set__.intersection({n}) - or k in null_default_fields - ) # pylint: disable=no-member - - if val is not None and val != UNSET_SENTINEL: - m[k] = val - elif val != UNSET_SENTINEL and ( - not k in optional_fields or (optional_nullable and is_set) - ): - m[k] = val - + is_nullable_and_explicitly_set = ( + k in nullable_fields + and (self.__pydantic_fields_set__.intersection({n})) # pylint: disable=no-member + ) + + if val != UNSET_SENTINEL: + if ( + val is not None + or k not in optional_fields + or is_nullable_and_explicitly_set + ): + m[k] = val for k, v in serialized.items(): m[k] = v diff --git a/src/mistralai/client/models/transcriptionsegmentchunk.py b/src/mistralai/client/models/transcriptionsegmentchunk.py index c78bec30..b87bfc2f 100644 --- a/src/mistralai/client/models/transcriptionsegmentchunk.py +++ b/src/mistralai/client/models/transcriptionsegmentchunk.py @@ -9,22 +9,21 @@ UNSET, UNSET_SENTINEL, ) +from mistralai.client.utils import validate_const import pydantic from pydantic import ConfigDict, model_serializer +from pydantic.functional_validators import AfterValidator from typing import Any, Dict, Literal, Optional -from typing_extensions import NotRequired, TypedDict - - -TranscriptionSegmentChunkType = Literal["transcription_segment",] +from typing_extensions import Annotated, NotRequired, TypedDict class TranscriptionSegmentChunkTypedDict(TypedDict): text: str start: float end: float + type: Literal["transcription_segment"] score: NotRequired[Nullable[float]] speaker_id: NotRequired[Nullable[str]] - type: NotRequired[TranscriptionSegmentChunkType] class TranscriptionSegmentChunk(BaseModel): @@ -39,12 +38,18 @@ class TranscriptionSegmentChunk(BaseModel): end: float + type: Annotated[ + Annotated[ + Optional[Literal["transcription_segment"]], + 
AfterValidator(validate_const("transcription_segment")), + ], + pydantic.Field(alias="type"), + ] = "transcription_segment" + score: OptionalNullable[float] = UNSET speaker_id: OptionalNullable[str] = UNSET - type: Optional[TranscriptionSegmentChunkType] = "transcription_segment" - @property def additional_properties(self): return self.__pydantic_extra__ @@ -55,33 +60,34 @@ def additional_properties(self, value): @model_serializer(mode="wrap") def serialize_model(self, handler): - optional_fields = ["score", "speaker_id", "type"] - nullable_fields = ["score", "speaker_id"] - null_default_fields = [] - + optional_fields = set(["type", "score", "speaker_id"]) + nullable_fields = set(["score", "speaker_id"]) serialized = handler(self) - m = {} for n, f in type(self).model_fields.items(): k = f.alias or n val = serialized.get(k) serialized.pop(k, None) - - optional_nullable = k in optional_fields and k in nullable_fields - is_set = ( - self.__pydantic_fields_set__.intersection({n}) - or k in null_default_fields - ) # pylint: disable=no-member - - if val is not None and val != UNSET_SENTINEL: - m[k] = val - elif val != UNSET_SENTINEL and ( - not k in optional_fields or (optional_nullable and is_set) - ): - m[k] = val - + is_nullable_and_explicitly_set = ( + k in nullable_fields + and (self.__pydantic_fields_set__.intersection({n})) # pylint: disable=no-member + ) + + if val != UNSET_SENTINEL: + if ( + val is not None + or k not in optional_fields + or is_nullable_and_explicitly_set + ): + m[k] = val for k, v in serialized.items(): m[k] = v return m + + +try: + TranscriptionSegmentChunk.model_rebuild() +except NameError: + pass diff --git a/src/mistralai/client/models/transcriptionstreamdone.py b/src/mistralai/client/models/transcriptionstreamdone.py index b5740b3b..e3c50169 100644 --- a/src/mistralai/client/models/transcriptionstreamdone.py +++ b/src/mistralai/client/models/transcriptionstreamdone.py @@ -41,7 +41,7 @@ class TranscriptionStreamDone(BaseModel): segments: 
Optional[List[TranscriptionSegmentChunk]] = None - TYPE: Annotated[ + type: Annotated[ Annotated[ Literal["transcription.done"], AfterValidator(validate_const("transcription.done")), @@ -59,33 +59,34 @@ def additional_properties(self, value): @model_serializer(mode="wrap") def serialize_model(self, handler): - optional_fields = ["segments"] - nullable_fields = ["language"] - null_default_fields = [] - + optional_fields = set(["segments"]) + nullable_fields = set(["language"]) serialized = handler(self) - m = {} for n, f in type(self).model_fields.items(): k = f.alias or n val = serialized.get(k) serialized.pop(k, None) - - optional_nullable = k in optional_fields and k in nullable_fields - is_set = ( - self.__pydantic_fields_set__.intersection({n}) - or k in null_default_fields - ) # pylint: disable=no-member - - if val is not None and val != UNSET_SENTINEL: - m[k] = val - elif val != UNSET_SENTINEL and ( - not k in optional_fields or (optional_nullable and is_set) - ): - m[k] = val - + is_nullable_and_explicitly_set = ( + k in nullable_fields + and (self.__pydantic_fields_set__.intersection({n})) # pylint: disable=no-member + ) + + if val != UNSET_SENTINEL: + if ( + val is not None + or k not in optional_fields + or is_nullable_and_explicitly_set + ): + m[k] = val for k, v in serialized.items(): m[k] = v return m + + +try: + TranscriptionStreamDone.model_rebuild() +except NameError: + pass diff --git a/src/mistralai/client/models/transcriptionstreamevents.py b/src/mistralai/client/models/transcriptionstreamevents.py index 17161a17..073fd99a 100644 --- a/src/mistralai/client/models/transcriptionstreamevents.py +++ b/src/mistralai/client/models/transcriptionstreamevents.py @@ -19,9 +19,12 @@ TranscriptionStreamTextDelta, TranscriptionStreamTextDeltaTypedDict, ) +from functools import partial from mistralai.client.types import BaseModel -from pydantic import Field -from typing import Union +from mistralai.client.utils.unions import parse_open_union +from pydantic 
import ConfigDict +from pydantic.functional_validators import BeforeValidator +from typing import Any, Literal, Union from typing_extensions import Annotated, TypeAliasType, TypedDict @@ -36,14 +39,41 @@ ) +class UnknownTranscriptionStreamEventsData(BaseModel): + r"""A TranscriptionStreamEventsData variant the SDK doesn't recognize. Preserves the raw payload.""" + + type: Literal["UNKNOWN"] = "UNKNOWN" + raw: Any + is_unknown: Literal[True] = True + + model_config = ConfigDict(frozen=True) + + +_TRANSCRIPTION_STREAM_EVENTS_DATA_VARIANTS: dict[str, Any] = { + "transcription.done": TranscriptionStreamDone, + "transcription.language": TranscriptionStreamLanguage, + "transcription.segment": TranscriptionStreamSegmentDelta, + "transcription.text.delta": TranscriptionStreamTextDelta, +} + + TranscriptionStreamEventsData = Annotated[ Union[ TranscriptionStreamDone, TranscriptionStreamLanguage, TranscriptionStreamSegmentDelta, TranscriptionStreamTextDelta, + UnknownTranscriptionStreamEventsData, ], - Field(discriminator="TYPE"), + BeforeValidator( + partial( + parse_open_union, + disc_key="type", + variants=_TRANSCRIPTION_STREAM_EVENTS_DATA_VARIANTS, + unknown_cls=UnknownTranscriptionStreamEventsData, + union_name="TranscriptionStreamEventsData", + ) + ), ] diff --git a/src/mistralai/client/models/transcriptionstreamlanguage.py b/src/mistralai/client/models/transcriptionstreamlanguage.py index 67b3e979..b6c61906 100644 --- a/src/mistralai/client/models/transcriptionstreamlanguage.py +++ b/src/mistralai/client/models/transcriptionstreamlanguage.py @@ -24,7 +24,7 @@ class TranscriptionStreamLanguage(BaseModel): audio_language: str - TYPE: Annotated[ + type: Annotated[ Annotated[ Literal["transcription.language"], AfterValidator(validate_const("transcription.language")), @@ -39,3 +39,9 @@ def additional_properties(self): @additional_properties.setter def additional_properties(self, value): self.__pydantic_extra__ = value # pyright: ignore[reportIncompatibleVariableOverride] + 
+ +try: + TranscriptionStreamLanguage.model_rebuild() +except NameError: + pass diff --git a/src/mistralai/client/models/transcriptionstreamsegmentdelta.py b/src/mistralai/client/models/transcriptionstreamsegmentdelta.py index 8db5e736..32ef8f9b 100644 --- a/src/mistralai/client/models/transcriptionstreamsegmentdelta.py +++ b/src/mistralai/client/models/transcriptionstreamsegmentdelta.py @@ -21,8 +21,8 @@ class TranscriptionStreamSegmentDeltaTypedDict(TypedDict): text: str start: float end: float - speaker_id: NotRequired[Nullable[str]] type: Literal["transcription.segment"] + speaker_id: NotRequired[Nullable[str]] class TranscriptionStreamSegmentDelta(BaseModel): @@ -37,9 +37,7 @@ class TranscriptionStreamSegmentDelta(BaseModel): end: float - speaker_id: OptionalNullable[str] = UNSET - - TYPE: Annotated[ + type: Annotated[ Annotated[ Literal["transcription.segment"], AfterValidator(validate_const("transcription.segment")), @@ -47,6 +45,8 @@ class TranscriptionStreamSegmentDelta(BaseModel): pydantic.Field(alias="type"), ] = "transcription.segment" + speaker_id: OptionalNullable[str] = UNSET + @property def additional_properties(self): return self.__pydantic_extra__ @@ -57,33 +57,34 @@ def additional_properties(self, value): @model_serializer(mode="wrap") def serialize_model(self, handler): - optional_fields = ["speaker_id"] - nullable_fields = ["speaker_id"] - null_default_fields = [] - + optional_fields = set(["speaker_id"]) + nullable_fields = set(["speaker_id"]) serialized = handler(self) - m = {} for n, f in type(self).model_fields.items(): k = f.alias or n val = serialized.get(k) serialized.pop(k, None) - - optional_nullable = k in optional_fields and k in nullable_fields - is_set = ( - self.__pydantic_fields_set__.intersection({n}) - or k in null_default_fields - ) # pylint: disable=no-member - - if val is not None and val != UNSET_SENTINEL: - m[k] = val - elif val != UNSET_SENTINEL and ( - not k in optional_fields or (optional_nullable and is_set) - ): - 
m[k] = val - + is_nullable_and_explicitly_set = ( + k in nullable_fields + and (self.__pydantic_fields_set__.intersection({n})) # pylint: disable=no-member + ) + + if val != UNSET_SENTINEL: + if ( + val is not None + or k not in optional_fields + or is_nullable_and_explicitly_set + ): + m[k] = val for k, v in serialized.items(): m[k] = v return m + + +try: + TranscriptionStreamSegmentDelta.model_rebuild() +except NameError: + pass diff --git a/src/mistralai/client/models/transcriptionstreamtextdelta.py b/src/mistralai/client/models/transcriptionstreamtextdelta.py index 49338a08..42f0ffb7 100644 --- a/src/mistralai/client/models/transcriptionstreamtextdelta.py +++ b/src/mistralai/client/models/transcriptionstreamtextdelta.py @@ -24,7 +24,7 @@ class TranscriptionStreamTextDelta(BaseModel): text: str - TYPE: Annotated[ + type: Annotated[ Annotated[ Literal["transcription.text.delta"], AfterValidator(validate_const("transcription.text.delta")), @@ -39,3 +39,9 @@ def additional_properties(self): @additional_properties.setter def additional_properties(self, value): self.__pydantic_extra__ = value # pyright: ignore[reportIncompatibleVariableOverride] + + +try: + TranscriptionStreamTextDelta.model_rebuild() +except NameError: + pass diff --git a/src/mistralai/client/models/unarchiveftmodelout.py b/src/mistralai/client/models/unarchiveftmodelout.py deleted file mode 100644 index 0249a69e..00000000 --- a/src/mistralai/client/models/unarchiveftmodelout.py +++ /dev/null @@ -1,27 +0,0 @@ -"""Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" -# @generated-id: 9dbc3bfb71ed - -from __future__ import annotations -from mistralai.client.types import BaseModel -from mistralai.client.utils import validate_const -import pydantic -from pydantic.functional_validators import AfterValidator -from typing import Literal, Optional -from typing_extensions import Annotated, NotRequired, TypedDict - - -class UnarchiveFTModelOutTypedDict(TypedDict): - id: str - object: Literal["model"] - archived: NotRequired[bool] - - -class UnarchiveFTModelOut(BaseModel): - id: str - - OBJECT: Annotated[ - Annotated[Optional[Literal["model"]], AfterValidator(validate_const("model"))], - pydantic.Field(alias="object"), - ] = "model" - - archived: Optional[bool] = False diff --git a/src/mistralai/client/models/unarchivemodelresponse.py b/src/mistralai/client/models/unarchivemodelresponse.py new file mode 100644 index 00000000..5c75d30e --- /dev/null +++ b/src/mistralai/client/models/unarchivemodelresponse.py @@ -0,0 +1,50 @@ +"""Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" +# @generated-id: 22e2ccbb0c80 + +from __future__ import annotations +from mistralai.client.types import BaseModel, UNSET_SENTINEL +from mistralai.client.utils import validate_const +import pydantic +from pydantic import model_serializer +from pydantic.functional_validators import AfterValidator +from typing import Literal, Optional +from typing_extensions import Annotated, NotRequired, TypedDict + + +class UnarchiveModelResponseTypedDict(TypedDict): + id: str + object: Literal["model"] + archived: NotRequired[bool] + + +class UnarchiveModelResponse(BaseModel): + id: str + + object: Annotated[ + Annotated[Optional[Literal["model"]], AfterValidator(validate_const("model"))], + pydantic.Field(alias="object"), + ] = "model" + + archived: Optional[bool] = False + + @model_serializer(mode="wrap") + def serialize_model(self, handler): + optional_fields = set(["object", "archived"]) + serialized = handler(self) + m = {} + + for n, f in type(self).model_fields.items(): + k = f.alias or n + val = serialized.get(k) + + if val != UNSET_SENTINEL: + if val is not None or k not in optional_fields: + m[k] = val + + return m + + +try: + UnarchiveModelResponse.model_rebuild() +except NameError: + pass diff --git a/src/mistralai/client/models/agentupdaterequest.py b/src/mistralai/client/models/updateagentrequest.py similarity index 66% rename from src/mistralai/client/models/agentupdaterequest.py rename to src/mistralai/client/models/updateagentrequest.py index 96e209d4..b751ff74 100644 --- a/src/mistralai/client/models/agentupdaterequest.py +++ b/src/mistralai/client/models/updateagentrequest.py @@ -1,5 +1,5 @@ """Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" -# @generated-id: 2d5a3a437819 +# @generated-id: 914b4b2be67a from __future__ import annotations from .codeinterpretertool import CodeInterpreterTool, CodeInterpreterToolTypedDict @@ -21,20 +21,20 @@ from typing_extensions import Annotated, NotRequired, TypeAliasType, TypedDict -AgentUpdateRequestToolTypedDict = TypeAliasType( - "AgentUpdateRequestToolTypedDict", +UpdateAgentRequestToolTypedDict = TypeAliasType( + "UpdateAgentRequestToolTypedDict", Union[ + FunctionToolTypedDict, WebSearchToolTypedDict, WebSearchPremiumToolTypedDict, CodeInterpreterToolTypedDict, ImageGenerationToolTypedDict, - FunctionToolTypedDict, DocumentLibraryToolTypedDict, ], ) -AgentUpdateRequestTool = Annotated[ +UpdateAgentRequestTool = Annotated[ Union[ CodeInterpreterTool, DocumentLibraryTool, @@ -43,14 +43,14 @@ WebSearchTool, WebSearchPremiumTool, ], - Field(discriminator="TYPE"), + Field(discriminator="type"), ] -class AgentUpdateRequestTypedDict(TypedDict): +class UpdateAgentRequestTypedDict(TypedDict): instructions: NotRequired[Nullable[str]] r"""Instruction prompt the model will follow during the conversation.""" - tools: NotRequired[List[AgentUpdateRequestToolTypedDict]] + tools: NotRequired[List[UpdateAgentRequestToolTypedDict]] r"""List of tools which are available to the model during the conversation.""" completion_args: NotRequired[CompletionArgsTypedDict] r"""White-listed arguments from the completion API""" @@ -63,11 +63,11 @@ class AgentUpdateRequestTypedDict(TypedDict): version_message: NotRequired[Nullable[str]] -class AgentUpdateRequest(BaseModel): +class UpdateAgentRequest(BaseModel): instructions: OptionalNullable[str] = UNSET r"""Instruction prompt the model will follow during the conversation.""" - tools: Optional[List[AgentUpdateRequestTool]] = None + tools: Optional[List[UpdateAgentRequestTool]] = None r"""List of tools which are available to the model during the conversation.""" completion_args: Optional[CompletionArgs] = None @@ -89,50 +89,49 @@ 
class AgentUpdateRequest(BaseModel): @model_serializer(mode="wrap") def serialize_model(self, handler): - optional_fields = [ - "instructions", - "tools", - "completion_args", - "model", - "name", - "description", - "handoffs", - "deployment_chat", - "metadata", - "version_message", - ] - nullable_fields = [ - "instructions", - "model", - "name", - "description", - "handoffs", - "deployment_chat", - "metadata", - "version_message", - ] - null_default_fields = [] - + optional_fields = set( + [ + "instructions", + "tools", + "completion_args", + "model", + "name", + "description", + "handoffs", + "deployment_chat", + "metadata", + "version_message", + ] + ) + nullable_fields = set( + [ + "instructions", + "model", + "name", + "description", + "handoffs", + "deployment_chat", + "metadata", + "version_message", + ] + ) serialized = handler(self) - m = {} for n, f in type(self).model_fields.items(): k = f.alias or n val = serialized.get(k) - serialized.pop(k, None) - - optional_nullable = k in optional_fields and k in nullable_fields - is_set = ( - self.__pydantic_fields_set__.intersection({n}) - or k in null_default_fields - ) # pylint: disable=no-member - - if val is not None and val != UNSET_SENTINEL: - m[k] = val - elif val != UNSET_SENTINEL and ( - not k in optional_fields or (optional_nullable and is_set) - ): - m[k] = val + is_nullable_and_explicitly_set = ( + k in nullable_fields + and (self.__pydantic_fields_set__.intersection({n})) # pylint: disable=no-member + ) + + if val != UNSET_SENTINEL: + if ( + val is not None + or k not in optional_fields + or is_nullable_and_explicitly_set + ): + m[k] = val return m diff --git a/src/mistralai/client/models/documentupdatein.py b/src/mistralai/client/models/updatedocumentrequest.py similarity index 60% rename from src/mistralai/client/models/documentupdatein.py rename to src/mistralai/client/models/updatedocumentrequest.py index 669554de..61e69655 100644 --- a/src/mistralai/client/models/documentupdatein.py +++ 
b/src/mistralai/client/models/updatedocumentrequest.py @@ -1,5 +1,5 @@ """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" -# @generated-id: d19c1b26a875 +# @generated-id: a8cfda07d337 from __future__ import annotations from datetime import datetime @@ -31,42 +31,37 @@ ) -class DocumentUpdateInTypedDict(TypedDict): +class UpdateDocumentRequestTypedDict(TypedDict): name: NotRequired[Nullable[str]] attributes: NotRequired[Nullable[Dict[str, AttributesTypedDict]]] -class DocumentUpdateIn(BaseModel): +class UpdateDocumentRequest(BaseModel): name: OptionalNullable[str] = UNSET attributes: OptionalNullable[Dict[str, Attributes]] = UNSET @model_serializer(mode="wrap") def serialize_model(self, handler): - optional_fields = ["name", "attributes"] - nullable_fields = ["name", "attributes"] - null_default_fields = [] - + optional_fields = set(["name", "attributes"]) + nullable_fields = set(["name", "attributes"]) serialized = handler(self) - m = {} for n, f in type(self).model_fields.items(): k = f.alias or n val = serialized.get(k) - serialized.pop(k, None) - - optional_nullable = k in optional_fields and k in nullable_fields - is_set = ( - self.__pydantic_fields_set__.intersection({n}) - or k in null_default_fields - ) # pylint: disable=no-member - - if val is not None and val != UNSET_SENTINEL: - m[k] = val - elif val != UNSET_SENTINEL and ( - not k in optional_fields or (optional_nullable and is_set) - ): - m[k] = val + is_nullable_and_explicitly_set = ( + k in nullable_fields + and (self.__pydantic_fields_set__.intersection({n})) # pylint: disable=no-member + ) + + if val != UNSET_SENTINEL: + if ( + val is not None + or k not in optional_fields + or is_nullable_and_explicitly_set + ): + m[k] = val return m diff --git a/src/mistralai/client/models/updateftmodelin.py b/src/mistralai/client/models/updateftmodelin.py deleted file mode 100644 index 4ac5a8a2..00000000 --- a/src/mistralai/client/models/updateftmodelin.py +++ /dev/null @@ -1,54 +0,0 @@ 
-"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" -# @generated-id: 39e2d678e651 - -from __future__ import annotations -from mistralai.client.types import ( - BaseModel, - Nullable, - OptionalNullable, - UNSET, - UNSET_SENTINEL, -) -from pydantic import model_serializer -from typing_extensions import NotRequired, TypedDict - - -class UpdateFTModelInTypedDict(TypedDict): - name: NotRequired[Nullable[str]] - description: NotRequired[Nullable[str]] - - -class UpdateFTModelIn(BaseModel): - name: OptionalNullable[str] = UNSET - - description: OptionalNullable[str] = UNSET - - @model_serializer(mode="wrap") - def serialize_model(self, handler): - optional_fields = ["name", "description"] - nullable_fields = ["name", "description"] - null_default_fields = [] - - serialized = handler(self) - - m = {} - - for n, f in type(self).model_fields.items(): - k = f.alias or n - val = serialized.get(k) - serialized.pop(k, None) - - optional_nullable = k in optional_fields and k in nullable_fields - is_set = ( - self.__pydantic_fields_set__.intersection({n}) - or k in null_default_fields - ) # pylint: disable=no-member - - if val is not None and val != UNSET_SENTINEL: - m[k] = val - elif val != UNSET_SENTINEL and ( - not k in optional_fields or (optional_nullable and is_set) - ): - m[k] = val - - return m diff --git a/src/mistralai/client/models/updatelibraryrequest.py b/src/mistralai/client/models/updatelibraryrequest.py new file mode 100644 index 00000000..91cbf2a1 --- /dev/null +++ b/src/mistralai/client/models/updatelibraryrequest.py @@ -0,0 +1,49 @@ +"""Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" +# @generated-id: 51bc63885337 + +from __future__ import annotations +from mistralai.client.types import ( + BaseModel, + Nullable, + OptionalNullable, + UNSET, + UNSET_SENTINEL, +) +from pydantic import model_serializer +from typing_extensions import NotRequired, TypedDict + + +class UpdateLibraryRequestTypedDict(TypedDict): + name: NotRequired[Nullable[str]] + description: NotRequired[Nullable[str]] + + +class UpdateLibraryRequest(BaseModel): + name: OptionalNullable[str] = UNSET + + description: OptionalNullable[str] = UNSET + + @model_serializer(mode="wrap") + def serialize_model(self, handler): + optional_fields = set(["name", "description"]) + nullable_fields = set(["name", "description"]) + serialized = handler(self) + m = {} + + for n, f in type(self).model_fields.items(): + k = f.alias or n + val = serialized.get(k) + is_nullable_and_explicitly_set = ( + k in nullable_fields + and (self.__pydantic_fields_set__.intersection({n})) # pylint: disable=no-member + ) + + if val != UNSET_SENTINEL: + if ( + val is not None + or k not in optional_fields + or is_nullable_and_explicitly_set + ): + m[k] = val + + return m diff --git a/src/mistralai/client/models/updatemodelop.py b/src/mistralai/client/models/updatemodelop.py deleted file mode 100644 index 023be979..00000000 --- a/src/mistralai/client/models/updatemodelop.py +++ /dev/null @@ -1,43 +0,0 @@ -"""Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" -# @generated-id: ba149ecfe03e - -from __future__ import annotations -from .classifierftmodelout import ClassifierFTModelOut, ClassifierFTModelOutTypedDict -from .completionftmodelout import CompletionFTModelOut, CompletionFTModelOutTypedDict -from .updateftmodelin import UpdateFTModelIn, UpdateFTModelInTypedDict -from mistralai.client.types import BaseModel -from mistralai.client.utils import FieldMetadata, PathParamMetadata, RequestMetadata -from pydantic import Field -from typing import Union -from typing_extensions import Annotated, TypeAliasType, TypedDict - - -class UpdateModelRequestTypedDict(TypedDict): - model_id: str - r"""The ID of the model to update.""" - update_ft_model_in: UpdateFTModelInTypedDict - - -class UpdateModelRequest(BaseModel): - model_id: Annotated[ - str, FieldMetadata(path=PathParamMetadata(style="simple", explode=False)) - ] - r"""The ID of the model to update.""" - - update_ft_model_in: Annotated[ - UpdateFTModelIn, - FieldMetadata(request=RequestMetadata(media_type="application/json")), - ] - - -UpdateModelResponseTypedDict = TypeAliasType( - "UpdateModelResponseTypedDict", - Union[CompletionFTModelOutTypedDict, ClassifierFTModelOutTypedDict], -) -r"""OK""" - - -UpdateModelResponse = Annotated[ - Union[ClassifierFTModelOut, CompletionFTModelOut], Field(discriminator="MODEL_TYPE") -] -r"""OK""" diff --git a/src/mistralai/client/models/updatemodelrequest.py b/src/mistralai/client/models/updatemodelrequest.py new file mode 100644 index 00000000..f685cfcc --- /dev/null +++ b/src/mistralai/client/models/updatemodelrequest.py @@ -0,0 +1,49 @@ +"""Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" +# @generated-id: fe649967751e + +from __future__ import annotations +from mistralai.client.types import ( + BaseModel, + Nullable, + OptionalNullable, + UNSET, + UNSET_SENTINEL, +) +from pydantic import model_serializer +from typing_extensions import NotRequired, TypedDict + + +class UpdateModelRequestTypedDict(TypedDict): + name: NotRequired[Nullable[str]] + description: NotRequired[Nullable[str]] + + +class UpdateModelRequest(BaseModel): + name: OptionalNullable[str] = UNSET + + description: OptionalNullable[str] = UNSET + + @model_serializer(mode="wrap") + def serialize_model(self, handler): + optional_fields = set(["name", "description"]) + nullable_fields = set(["name", "description"]) + serialized = handler(self) + m = {} + + for n, f in type(self).model_fields.items(): + k = f.alias or n + val = serialized.get(k) + is_nullable_and_explicitly_set = ( + k in nullable_fields + and (self.__pydantic_fields_set__.intersection({n})) # pylint: disable=no-member + ) + + if val != UNSET_SENTINEL: + if ( + val is not None + or k not in optional_fields + or is_nullable_and_explicitly_set + ): + m[k] = val + + return m diff --git a/src/mistralai/client/models/usageinfo.py b/src/mistralai/client/models/usageinfo.py index e78f92e7..31cbf07e 100644 --- a/src/mistralai/client/models/usageinfo.py +++ b/src/mistralai/client/models/usageinfo.py @@ -46,37 +46,34 @@ def additional_properties(self, value): @model_serializer(mode="wrap") def serialize_model(self, handler): - optional_fields = [ - "prompt_tokens", - "completion_tokens", - "total_tokens", - "prompt_audio_seconds", - ] - nullable_fields = ["prompt_audio_seconds"] - null_default_fields = [] - + optional_fields = set( + [ + "prompt_tokens", + "completion_tokens", + "total_tokens", + "prompt_audio_seconds", + ] + ) + nullable_fields = set(["prompt_audio_seconds"]) serialized = handler(self) - m = {} for n, f in type(self).model_fields.items(): k = f.alias or n val = serialized.get(k) serialized.pop(k, 
None) - - optional_nullable = k in optional_fields and k in nullable_fields - is_set = ( - self.__pydantic_fields_set__.intersection({n}) - or k in null_default_fields - ) # pylint: disable=no-member - - if val is not None and val != UNSET_SENTINEL: - m[k] = val - elif val != UNSET_SENTINEL and ( - not k in optional_fields or (optional_nullable and is_set) - ): - m[k] = val - + is_nullable_and_explicitly_set = ( + k in nullable_fields + and (self.__pydantic_fields_set__.intersection({n})) # pylint: disable=no-member + ) + + if val != UNSET_SENTINEL: + if ( + val is not None + or k not in optional_fields + or is_nullable_and_explicitly_set + ): + m[k] = val for k, v in serialized.items(): m[k] = v diff --git a/src/mistralai/client/models/usermessage.py b/src/mistralai/client/models/usermessage.py index 25ccdf80..63e76792 100644 --- a/src/mistralai/client/models/usermessage.py +++ b/src/mistralai/client/models/usermessage.py @@ -28,37 +28,27 @@ class UserMessageTypedDict(TypedDict): class UserMessage(BaseModel): content: Nullable[UserMessageContent] - ROLE: Annotated[ + role: Annotated[ Annotated[Literal["user"], AfterValidator(validate_const("user"))], pydantic.Field(alias="role"), ] = "user" @model_serializer(mode="wrap") def serialize_model(self, handler): - optional_fields = [] - nullable_fields = ["content"] - null_default_fields = [] - serialized = handler(self) - m = {} for n, f in type(self).model_fields.items(): k = f.alias or n val = serialized.get(k) - serialized.pop(k, None) - - optional_nullable = k in optional_fields and k in nullable_fields - is_set = ( - self.__pydantic_fields_set__.intersection({n}) - or k in null_default_fields - ) # pylint: disable=no-member - if val is not None and val != UNSET_SENTINEL: - m[k] = val - elif val != UNSET_SENTINEL and ( - not k in optional_fields or (optional_nullable and is_set) - ): + if val != UNSET_SENTINEL: m[k] = val return m + + +try: + UserMessage.model_rebuild() +except NameError: + pass diff --git 
a/src/mistralai/client/models/wandbintegration.py b/src/mistralai/client/models/wandbintegration.py index c5db4a6d..f0df2c77 100644 --- a/src/mistralai/client/models/wandbintegration.py +++ b/src/mistralai/client/models/wandbintegration.py @@ -35,7 +35,7 @@ class WandbIntegration(BaseModel): api_key: str r"""The WandB API key to use for authentication.""" - TYPE: Annotated[ + type: Annotated[ Annotated[Literal["wandb"], AfterValidator(validate_const("wandb"))], pydantic.Field(alias="type"), ] = "wandb" @@ -47,30 +47,31 @@ class WandbIntegration(BaseModel): @model_serializer(mode="wrap") def serialize_model(self, handler): - optional_fields = ["name", "run_name"] - nullable_fields = ["name", "run_name"] - null_default_fields = [] - + optional_fields = set(["name", "run_name"]) + nullable_fields = set(["name", "run_name"]) serialized = handler(self) - m = {} for n, f in type(self).model_fields.items(): k = f.alias or n val = serialized.get(k) - serialized.pop(k, None) + is_nullable_and_explicitly_set = ( + k in nullable_fields + and (self.__pydantic_fields_set__.intersection({n})) # pylint: disable=no-member + ) + + if val != UNSET_SENTINEL: + if ( + val is not None + or k not in optional_fields + or is_nullable_and_explicitly_set + ): + m[k] = val - optional_nullable = k in optional_fields and k in nullable_fields - is_set = ( - self.__pydantic_fields_set__.intersection({n}) - or k in null_default_fields - ) # pylint: disable=no-member + return m - if val is not None and val != UNSET_SENTINEL: - m[k] = val - elif val != UNSET_SENTINEL and ( - not k in optional_fields or (optional_nullable and is_set) - ): - m[k] = val - return m +try: + WandbIntegration.model_rebuild() +except NameError: + pass diff --git a/src/mistralai/client/models/wandbintegrationout.py b/src/mistralai/client/models/wandbintegrationresult.py similarity index 65% rename from src/mistralai/client/models/wandbintegrationout.py rename to src/mistralai/client/models/wandbintegrationresult.py index 
d0a09bf4..575cbd42 100644 --- a/src/mistralai/client/models/wandbintegrationout.py +++ b/src/mistralai/client/models/wandbintegrationresult.py @@ -1,5 +1,5 @@ """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" -# @generated-id: 6b103d74195c +# @generated-id: 8787b4ad5458 from __future__ import annotations from mistralai.client.types import ( @@ -17,7 +17,7 @@ from typing_extensions import Annotated, NotRequired, TypedDict -class WandbIntegrationOutTypedDict(TypedDict): +class WandbIntegrationResultTypedDict(TypedDict): project: str r"""The name of the project that the new run will be created under.""" type: Literal["wandb"] @@ -27,11 +27,11 @@ class WandbIntegrationOutTypedDict(TypedDict): url: NotRequired[Nullable[str]] -class WandbIntegrationOut(BaseModel): +class WandbIntegrationResult(BaseModel): project: str r"""The name of the project that the new run will be created under.""" - TYPE: Annotated[ + type: Annotated[ Annotated[Literal["wandb"], AfterValidator(validate_const("wandb"))], pydantic.Field(alias="type"), ] = "wandb" @@ -45,30 +45,31 @@ class WandbIntegrationOut(BaseModel): @model_serializer(mode="wrap") def serialize_model(self, handler): - optional_fields = ["name", "run_name", "url"] - nullable_fields = ["name", "run_name", "url"] - null_default_fields = [] - + optional_fields = set(["name", "run_name", "url"]) + nullable_fields = set(["name", "run_name", "url"]) serialized = handler(self) - m = {} for n, f in type(self).model_fields.items(): k = f.alias or n val = serialized.get(k) - serialized.pop(k, None) + is_nullable_and_explicitly_set = ( + k in nullable_fields + and (self.__pydantic_fields_set__.intersection({n})) # pylint: disable=no-member + ) + + if val != UNSET_SENTINEL: + if ( + val is not None + or k not in optional_fields + or is_nullable_and_explicitly_set + ): + m[k] = val - optional_nullable = k in optional_fields and k in nullable_fields - is_set = ( - self.__pydantic_fields_set__.intersection({n}) - or k in 
null_default_fields - ) # pylint: disable=no-member + return m - if val is not None and val != UNSET_SENTINEL: - m[k] = val - elif val != UNSET_SENTINEL and ( - not k in optional_fields or (optional_nullable and is_set) - ): - m[k] = val - return m +try: + WandbIntegrationResult.model_rebuild() +except NameError: + pass diff --git a/src/mistralai/client/models/websearchpremiumtool.py b/src/mistralai/client/models/websearchpremiumtool.py index 9588ab1d..00d4a4b4 100644 --- a/src/mistralai/client/models/websearchpremiumtool.py +++ b/src/mistralai/client/models/websearchpremiumtool.py @@ -2,23 +2,65 @@ # @generated-id: bfe88af887e3 from __future__ import annotations -from mistralai.client.types import BaseModel +from .toolconfiguration import ToolConfiguration, ToolConfigurationTypedDict +from mistralai.client.types import ( + BaseModel, + Nullable, + OptionalNullable, + UNSET, + UNSET_SENTINEL, +) from mistralai.client.utils import validate_const import pydantic +from pydantic import model_serializer from pydantic.functional_validators import AfterValidator from typing import Literal -from typing_extensions import Annotated, TypedDict +from typing_extensions import Annotated, NotRequired, TypedDict class WebSearchPremiumToolTypedDict(TypedDict): + tool_configuration: NotRequired[Nullable[ToolConfigurationTypedDict]] type: Literal["web_search_premium"] class WebSearchPremiumTool(BaseModel): - TYPE: Annotated[ + tool_configuration: OptionalNullable[ToolConfiguration] = UNSET + + type: Annotated[ Annotated[ Literal["web_search_premium"], AfterValidator(validate_const("web_search_premium")), ], pydantic.Field(alias="type"), ] = "web_search_premium" + + @model_serializer(mode="wrap") + def serialize_model(self, handler): + optional_fields = set(["tool_configuration"]) + nullable_fields = set(["tool_configuration"]) + serialized = handler(self) + m = {} + + for n, f in type(self).model_fields.items(): + k = f.alias or n + val = serialized.get(k) + 
is_nullable_and_explicitly_set = ( + k in nullable_fields + and (self.__pydantic_fields_set__.intersection({n})) # pylint: disable=no-member + ) + + if val != UNSET_SENTINEL: + if ( + val is not None + or k not in optional_fields + or is_nullable_and_explicitly_set + ): + m[k] = val + + return m + + +try: + WebSearchPremiumTool.model_rebuild() +except NameError: + pass diff --git a/src/mistralai/client/models/websearchtool.py b/src/mistralai/client/models/websearchtool.py index 27502909..6871080f 100644 --- a/src/mistralai/client/models/websearchtool.py +++ b/src/mistralai/client/models/websearchtool.py @@ -2,20 +2,62 @@ # @generated-id: 26b0903423e5 from __future__ import annotations -from mistralai.client.types import BaseModel +from .toolconfiguration import ToolConfiguration, ToolConfigurationTypedDict +from mistralai.client.types import ( + BaseModel, + Nullable, + OptionalNullable, + UNSET, + UNSET_SENTINEL, +) from mistralai.client.utils import validate_const import pydantic +from pydantic import model_serializer from pydantic.functional_validators import AfterValidator from typing import Literal -from typing_extensions import Annotated, TypedDict +from typing_extensions import Annotated, NotRequired, TypedDict class WebSearchToolTypedDict(TypedDict): + tool_configuration: NotRequired[Nullable[ToolConfigurationTypedDict]] type: Literal["web_search"] class WebSearchTool(BaseModel): - TYPE: Annotated[ + tool_configuration: OptionalNullable[ToolConfiguration] = UNSET + + type: Annotated[ Annotated[Literal["web_search"], AfterValidator(validate_const("web_search"))], pydantic.Field(alias="type"), ] = "web_search" + + @model_serializer(mode="wrap") + def serialize_model(self, handler): + optional_fields = set(["tool_configuration"]) + nullable_fields = set(["tool_configuration"]) + serialized = handler(self) + m = {} + + for n, f in type(self).model_fields.items(): + k = f.alias or n + val = serialized.get(k) + is_nullable_and_explicitly_set = ( + k in 
nullable_fields + and (self.__pydantic_fields_set__.intersection({n})) # pylint: disable=no-member + ) + + if val != UNSET_SENTINEL: + if ( + val is not None + or k not in optional_fields + or is_nullable_and_explicitly_set + ): + m[k] = val + + return m + + +try: + WebSearchTool.model_rebuild() +except NameError: + pass diff --git a/src/mistralai/client/models_.py b/src/mistralai/client/models_.py index 05b33ac7..a287c413 100644 --- a/src/mistralai/client/models_.py +++ b/src/mistralai/client/models_.py @@ -2,7 +2,7 @@ # @generated-id: 1d277958a843 from .basesdk import BaseSDK -from mistralai.client import models, utils +from mistralai.client import errors, models, utils from mistralai.client._hooks import HookContext from mistralai.client.types import OptionalNullable, UNSET from mistralai.client.utils import get_security_from_env @@ -68,7 +68,7 @@ def list( hook_ctx=HookContext( config=self.sdk_configuration, base_url=base_url or "", - operation_id="ListModels", + operation_id="list_models_v1_models_get", oauth2_scopes=None, security_source=get_security_from_env( self.sdk_configuration.security, models.Security @@ -83,12 +83,12 @@ def list( return unmarshal_json_response(models.ModelList, http_res) if utils.match_response(http_res, "4XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise models.SDKError("Unexpected response received", http_res) + raise errors.SDKError("Unexpected response received", http_res) async def list_async( self, @@ -145,7 +145,7 @@ async def list_async( hook_ctx=HookContext( config=self.sdk_configuration, base_url=base_url or "", - 
operation_id="ListModels", + operation_id="list_models_v1_models_get", oauth2_scopes=None, security_source=get_security_from_env( self.sdk_configuration.security, models.Security @@ -160,12 +160,12 @@ async def list_async( return unmarshal_json_response(models.ModelList, http_res) if utils.match_response(http_res, "4XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise models.SDKError("Unexpected response received", http_res) + raise errors.SDKError("Unexpected response received", http_res) def retrieve( self, @@ -196,7 +196,7 @@ def retrieve( else: base_url = self._get_url(base_url, url_variables) - request = models.RetrieveModelRequest( + request = models.RetrieveModelV1ModelsModelIDGetRequest( model_id=model_id, ) @@ -229,7 +229,7 @@ def retrieve( hook_ctx=HookContext( config=self.sdk_configuration, base_url=base_url or "", - operation_id="RetrieveModel", + operation_id="retrieve_model_v1_models__model_id__get", oauth2_scopes=None, security_source=get_security_from_env( self.sdk_configuration.security, models.Security @@ -247,17 +247,17 @@ def retrieve( ) if utils.match_response(http_res, "422", "application/json"): response_data = unmarshal_json_response( - models.HTTPValidationErrorData, http_res + errors.HTTPValidationErrorData, http_res ) - raise models.HTTPValidationError(response_data, http_res) + raise errors.HTTPValidationError(response_data, http_res) if utils.match_response(http_res, "4XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error 
occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise models.SDKError("Unexpected response received", http_res) + raise errors.SDKError("Unexpected response received", http_res) async def retrieve_async( self, @@ -288,7 +288,7 @@ async def retrieve_async( else: base_url = self._get_url(base_url, url_variables) - request = models.RetrieveModelRequest( + request = models.RetrieveModelV1ModelsModelIDGetRequest( model_id=model_id, ) @@ -321,7 +321,7 @@ async def retrieve_async( hook_ctx=HookContext( config=self.sdk_configuration, base_url=base_url or "", - operation_id="RetrieveModel", + operation_id="retrieve_model_v1_models__model_id__get", oauth2_scopes=None, security_source=get_security_from_env( self.sdk_configuration.security, models.Security @@ -339,17 +339,17 @@ async def retrieve_async( ) if utils.match_response(http_res, "422", "application/json"): response_data = unmarshal_json_response( - models.HTTPValidationErrorData, http_res + errors.HTTPValidationErrorData, http_res ) - raise models.HTTPValidationError(response_data, http_res) + raise errors.HTTPValidationError(response_data, http_res) if utils.match_response(http_res, "4XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise models.SDKError("Unexpected response received", http_res) + raise errors.SDKError("Unexpected response received", http_res) def delete( self, @@ -380,7 
+380,7 @@ def delete( else: base_url = self._get_url(base_url, url_variables) - request = models.DeleteModelRequest( + request = models.DeleteModelV1ModelsModelIDDeleteRequest( model_id=model_id, ) @@ -413,7 +413,7 @@ def delete( hook_ctx=HookContext( config=self.sdk_configuration, base_url=base_url or "", - operation_id="DeleteModel", + operation_id="delete_model_v1_models__model_id__delete", oauth2_scopes=None, security_source=get_security_from_env( self.sdk_configuration.security, models.Security @@ -429,17 +429,17 @@ def delete( return unmarshal_json_response(models.DeleteModelOut, http_res) if utils.match_response(http_res, "422", "application/json"): response_data = unmarshal_json_response( - models.HTTPValidationErrorData, http_res + errors.HTTPValidationErrorData, http_res ) - raise models.HTTPValidationError(response_data, http_res) + raise errors.HTTPValidationError(response_data, http_res) if utils.match_response(http_res, "4XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise models.SDKError("Unexpected response received", http_res) + raise errors.SDKError("Unexpected response received", http_res) async def delete_async( self, @@ -470,7 +470,7 @@ async def delete_async( else: base_url = self._get_url(base_url, url_variables) - request = models.DeleteModelRequest( + request = models.DeleteModelV1ModelsModelIDDeleteRequest( model_id=model_id, ) @@ -503,7 +503,7 @@ async def delete_async( hook_ctx=HookContext( config=self.sdk_configuration, base_url=base_url or "", - operation_id="DeleteModel", + operation_id="delete_model_v1_models__model_id__delete", oauth2_scopes=None, 
security_source=get_security_from_env( self.sdk_configuration.security, models.Security @@ -519,17 +519,17 @@ async def delete_async( return unmarshal_json_response(models.DeleteModelOut, http_res) if utils.match_response(http_res, "422", "application/json"): response_data = unmarshal_json_response( - models.HTTPValidationErrorData, http_res + errors.HTTPValidationErrorData, http_res ) - raise models.HTTPValidationError(response_data, http_res) + raise errors.HTTPValidationError(response_data, http_res) if utils.match_response(http_res, "4XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise models.SDKError("Unexpected response received", http_res) + raise errors.SDKError("Unexpected response received", http_res) def update( self, @@ -541,7 +541,7 @@ def update( server_url: Optional[str] = None, timeout_ms: Optional[int] = None, http_headers: Optional[Mapping[str, str]] = None, - ) -> models.UpdateModelResponse: + ) -> models.JobsAPIRoutesFineTuningUpdateFineTunedModelResponse: r"""Update Fine Tuned Model Update a model name or description. 
@@ -564,9 +564,9 @@ def update( else: base_url = self._get_url(base_url, url_variables) - request = models.UpdateModelRequest( + request = models.JobsAPIRoutesFineTuningUpdateFineTunedModelRequest( model_id=model_id, - update_ft_model_in=models.UpdateFTModelIn( + update_model_request=models.UpdateModelRequest( name=name, description=description, ), @@ -586,7 +586,11 @@ def update( http_headers=http_headers, security=self.sdk_configuration.security, get_serialized_body=lambda: utils.serialize_request_body( - request.update_ft_model_in, False, False, "json", models.UpdateFTModelIn + request.update_model_request, + False, + False, + "json", + models.UpdateModelRequest, ), allow_empty_value=None, timeout_ms=timeout_ms, @@ -604,7 +608,7 @@ def update( hook_ctx=HookContext( config=self.sdk_configuration, base_url=base_url or "", - operation_id="UpdateModel", + operation_id="jobs_api_routes_fine_tuning_update_fine_tuned_model", oauth2_scopes=None, security_source=get_security_from_env( self.sdk_configuration.security, models.Security @@ -616,15 +620,17 @@ def update( ) if utils.match_response(http_res, "200", "application/json"): - return unmarshal_json_response(models.UpdateModelResponse, http_res) + return unmarshal_json_response( + models.JobsAPIRoutesFineTuningUpdateFineTunedModelResponse, http_res + ) if utils.match_response(http_res, "4XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise models.SDKError("Unexpected response received", http_res) + raise errors.SDKError("Unexpected response received", http_res) async def update_async( self, @@ -636,7 +642,7 @@ async def update_async( 
server_url: Optional[str] = None, timeout_ms: Optional[int] = None, http_headers: Optional[Mapping[str, str]] = None, - ) -> models.UpdateModelResponse: + ) -> models.JobsAPIRoutesFineTuningUpdateFineTunedModelResponse: r"""Update Fine Tuned Model Update a model name or description. @@ -659,9 +665,9 @@ async def update_async( else: base_url = self._get_url(base_url, url_variables) - request = models.UpdateModelRequest( + request = models.JobsAPIRoutesFineTuningUpdateFineTunedModelRequest( model_id=model_id, - update_ft_model_in=models.UpdateFTModelIn( + update_model_request=models.UpdateModelRequest( name=name, description=description, ), @@ -681,7 +687,11 @@ async def update_async( http_headers=http_headers, security=self.sdk_configuration.security, get_serialized_body=lambda: utils.serialize_request_body( - request.update_ft_model_in, False, False, "json", models.UpdateFTModelIn + request.update_model_request, + False, + False, + "json", + models.UpdateModelRequest, ), allow_empty_value=None, timeout_ms=timeout_ms, @@ -699,7 +709,7 @@ async def update_async( hook_ctx=HookContext( config=self.sdk_configuration, base_url=base_url or "", - operation_id="UpdateModel", + operation_id="jobs_api_routes_fine_tuning_update_fine_tuned_model", oauth2_scopes=None, security_source=get_security_from_env( self.sdk_configuration.security, models.Security @@ -711,15 +721,17 @@ async def update_async( ) if utils.match_response(http_res, "200", "application/json"): - return unmarshal_json_response(models.UpdateModelResponse, http_res) + return unmarshal_json_response( + models.JobsAPIRoutesFineTuningUpdateFineTunedModelResponse, http_res + ) if utils.match_response(http_res, "4XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = await 
utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise models.SDKError("Unexpected response received", http_res) + raise errors.SDKError("Unexpected response received", http_res) def archive( self, @@ -729,7 +741,7 @@ def archive( server_url: Optional[str] = None, timeout_ms: Optional[int] = None, http_headers: Optional[Mapping[str, str]] = None, - ) -> models.ArchiveFTModelOut: + ) -> models.ArchiveModelResponse: r"""Archive Fine Tuned Model Archive a fine-tuned model. @@ -750,7 +762,7 @@ def archive( else: base_url = self._get_url(base_url, url_variables) - request = models.ArchiveModelRequest( + request = models.JobsAPIRoutesFineTuningArchiveFineTunedModelRequest( model_id=model_id, ) @@ -783,7 +795,7 @@ def archive( hook_ctx=HookContext( config=self.sdk_configuration, base_url=base_url or "", - operation_id="ArchiveModel", + operation_id="jobs_api_routes_fine_tuning_archive_fine_tuned_model", oauth2_scopes=None, security_source=get_security_from_env( self.sdk_configuration.security, models.Security @@ -795,15 +807,15 @@ def archive( ) if utils.match_response(http_res, "200", "application/json"): - return unmarshal_json_response(models.ArchiveFTModelOut, http_res) + return unmarshal_json_response(models.ArchiveModelResponse, http_res) if utils.match_response(http_res, "4XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise models.SDKError("Unexpected response received", http_res) + raise errors.SDKError("Unexpected response received", http_res) async 
def archive_async( self, @@ -813,7 +825,7 @@ async def archive_async( server_url: Optional[str] = None, timeout_ms: Optional[int] = None, http_headers: Optional[Mapping[str, str]] = None, - ) -> models.ArchiveFTModelOut: + ) -> models.ArchiveModelResponse: r"""Archive Fine Tuned Model Archive a fine-tuned model. @@ -834,7 +846,7 @@ async def archive_async( else: base_url = self._get_url(base_url, url_variables) - request = models.ArchiveModelRequest( + request = models.JobsAPIRoutesFineTuningArchiveFineTunedModelRequest( model_id=model_id, ) @@ -867,7 +879,7 @@ async def archive_async( hook_ctx=HookContext( config=self.sdk_configuration, base_url=base_url or "", - operation_id="ArchiveModel", + operation_id="jobs_api_routes_fine_tuning_archive_fine_tuned_model", oauth2_scopes=None, security_source=get_security_from_env( self.sdk_configuration.security, models.Security @@ -879,15 +891,15 @@ async def archive_async( ) if utils.match_response(http_res, "200", "application/json"): - return unmarshal_json_response(models.ArchiveFTModelOut, http_res) + return unmarshal_json_response(models.ArchiveModelResponse, http_res) if utils.match_response(http_res, "4XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise models.SDKError("Unexpected response received", http_res) + raise errors.SDKError("Unexpected response received", http_res) def unarchive( self, @@ -897,7 +909,7 @@ def unarchive( server_url: Optional[str] = None, timeout_ms: Optional[int] = None, http_headers: Optional[Mapping[str, str]] = None, - ) -> models.UnarchiveFTModelOut: + ) -> 
models.UnarchiveModelResponse: r"""Unarchive Fine Tuned Model Un-archive a fine-tuned model. @@ -918,7 +930,7 @@ def unarchive( else: base_url = self._get_url(base_url, url_variables) - request = models.UnarchiveModelRequest( + request = models.JobsAPIRoutesFineTuningUnarchiveFineTunedModelRequest( model_id=model_id, ) @@ -951,7 +963,7 @@ def unarchive( hook_ctx=HookContext( config=self.sdk_configuration, base_url=base_url or "", - operation_id="UnarchiveModel", + operation_id="jobs_api_routes_fine_tuning_unarchive_fine_tuned_model", oauth2_scopes=None, security_source=get_security_from_env( self.sdk_configuration.security, models.Security @@ -963,15 +975,15 @@ def unarchive( ) if utils.match_response(http_res, "200", "application/json"): - return unmarshal_json_response(models.UnarchiveFTModelOut, http_res) + return unmarshal_json_response(models.UnarchiveModelResponse, http_res) if utils.match_response(http_res, "4XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise models.SDKError("Unexpected response received", http_res) + raise errors.SDKError("Unexpected response received", http_res) async def unarchive_async( self, @@ -981,7 +993,7 @@ async def unarchive_async( server_url: Optional[str] = None, timeout_ms: Optional[int] = None, http_headers: Optional[Mapping[str, str]] = None, - ) -> models.UnarchiveFTModelOut: + ) -> models.UnarchiveModelResponse: r"""Unarchive Fine Tuned Model Un-archive a fine-tuned model. 
@@ -1002,7 +1014,7 @@ async def unarchive_async( else: base_url = self._get_url(base_url, url_variables) - request = models.UnarchiveModelRequest( + request = models.JobsAPIRoutesFineTuningUnarchiveFineTunedModelRequest( model_id=model_id, ) @@ -1035,7 +1047,7 @@ async def unarchive_async( hook_ctx=HookContext( config=self.sdk_configuration, base_url=base_url or "", - operation_id="UnarchiveModel", + operation_id="jobs_api_routes_fine_tuning_unarchive_fine_tuned_model", oauth2_scopes=None, security_source=get_security_from_env( self.sdk_configuration.security, models.Security @@ -1047,12 +1059,12 @@ async def unarchive_async( ) if utils.match_response(http_res, "200", "application/json"): - return unmarshal_json_response(models.UnarchiveFTModelOut, http_res) + return unmarshal_json_response(models.UnarchiveModelResponse, http_res) if utils.match_response(http_res, "4XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise models.SDKError("Unexpected response received", http_res) + raise errors.SDKError("Unexpected response received", http_res) diff --git a/src/mistralai/client/ocr.py b/src/mistralai/client/ocr.py index 2aa38229..a46119d1 100644 --- a/src/mistralai/client/ocr.py +++ b/src/mistralai/client/ocr.py @@ -2,12 +2,8 @@ # @generated-id: 2f804a12fc62 from .basesdk import BaseSDK -from mistralai.client import models, utils +from mistralai.client import errors, models, utils from mistralai.client._hooks import HookContext -from mistralai.client.models import ( - ocrrequest as models_ocrrequest, - responseformat as models_responseformat, -) from 
mistralai.client.types import Nullable, OptionalNullable, UNSET from mistralai.client.utils import get_security_from_env from mistralai.client.utils.unmarshal_json_response import unmarshal_json_response @@ -21,28 +17,20 @@ def process( self, *, model: Nullable[str], - document: Union[ - models_ocrrequest.Document, models_ocrrequest.DocumentTypedDict - ], + document: Union[models.DocumentUnion, models.DocumentUnionTypedDict], id: Optional[str] = None, pages: OptionalNullable[List[int]] = UNSET, include_image_base64: OptionalNullable[bool] = UNSET, image_limit: OptionalNullable[int] = UNSET, image_min_size: OptionalNullable[int] = UNSET, bbox_annotation_format: OptionalNullable[ - Union[ - models_responseformat.ResponseFormat, - models_responseformat.ResponseFormatTypedDict, - ] + Union[models.ResponseFormat, models.ResponseFormatTypedDict] ] = UNSET, document_annotation_format: OptionalNullable[ - Union[ - models_responseformat.ResponseFormat, - models_responseformat.ResponseFormatTypedDict, - ] + Union[models.ResponseFormat, models.ResponseFormatTypedDict] ] = UNSET, document_annotation_prompt: OptionalNullable[str] = UNSET, - table_format: OptionalNullable[models_ocrrequest.TableFormat] = UNSET, + table_format: OptionalNullable[models.TableFormat] = UNSET, extract_header: Optional[bool] = None, extract_footer: Optional[bool] = None, retries: OptionalNullable[utils.RetryConfig] = UNSET, @@ -83,7 +71,7 @@ def process( request = models.OCRRequest( model=model, id=id, - document=utils.get_pydantic_model(document, models.Document), + document=utils.get_pydantic_model(document, models.DocumentUnion), pages=pages, include_image_base64=include_image_base64, image_limit=image_limit, @@ -148,44 +136,36 @@ def process( return unmarshal_json_response(models.OCRResponse, http_res) if utils.match_response(http_res, "422", "application/json"): response_data = unmarshal_json_response( - models.HTTPValidationErrorData, http_res + errors.HTTPValidationErrorData, http_res ) - raise 
models.HTTPValidationError(response_data, http_res) + raise errors.HTTPValidationError(response_data, http_res) if utils.match_response(http_res, "4XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise models.SDKError("Unexpected response received", http_res) + raise errors.SDKError("Unexpected response received", http_res) async def process_async( self, *, model: Nullable[str], - document: Union[ - models_ocrrequest.Document, models_ocrrequest.DocumentTypedDict - ], + document: Union[models.DocumentUnion, models.DocumentUnionTypedDict], id: Optional[str] = None, pages: OptionalNullable[List[int]] = UNSET, include_image_base64: OptionalNullable[bool] = UNSET, image_limit: OptionalNullable[int] = UNSET, image_min_size: OptionalNullable[int] = UNSET, bbox_annotation_format: OptionalNullable[ - Union[ - models_responseformat.ResponseFormat, - models_responseformat.ResponseFormatTypedDict, - ] + Union[models.ResponseFormat, models.ResponseFormatTypedDict] ] = UNSET, document_annotation_format: OptionalNullable[ - Union[ - models_responseformat.ResponseFormat, - models_responseformat.ResponseFormatTypedDict, - ] + Union[models.ResponseFormat, models.ResponseFormatTypedDict] ] = UNSET, document_annotation_prompt: OptionalNullable[str] = UNSET, - table_format: OptionalNullable[models_ocrrequest.TableFormat] = UNSET, + table_format: OptionalNullable[models.TableFormat] = UNSET, extract_header: Optional[bool] = None, extract_footer: Optional[bool] = None, retries: OptionalNullable[utils.RetryConfig] = UNSET, @@ -226,7 +206,7 @@ async def process_async( request = models.OCRRequest( 
model=model, id=id, - document=utils.get_pydantic_model(document, models.Document), + document=utils.get_pydantic_model(document, models.DocumentUnion), pages=pages, include_image_base64=include_image_base64, image_limit=image_limit, @@ -291,14 +271,14 @@ async def process_async( return unmarshal_json_response(models.OCRResponse, http_res) if utils.match_response(http_res, "422", "application/json"): response_data = unmarshal_json_response( - models.HTTPValidationErrorData, http_res + errors.HTTPValidationErrorData, http_res ) - raise models.HTTPValidationError(response_data, http_res) + raise errors.HTTPValidationError(response_data, http_res) if utils.match_response(http_res, "4XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise models.SDKError("Unexpected response received", http_res) + raise errors.SDKError("Unexpected response received", http_res) diff --git a/src/mistralai/client/sdk.py b/src/mistralai/client/sdk.py index b1ab5493..80bf25a7 100644 --- a/src/mistralai/client/sdk.py +++ b/src/mistralai/client/sdk.py @@ -8,7 +8,7 @@ from .utils.retries import RetryConfig import httpx import importlib -from mistralai.client import models, utils +from mistralai.client import models as models_, utils from mistralai.client._hooks import SDKHooks from mistralai.client.types import OptionalNullable, UNSET import sys @@ -31,10 +31,7 @@ class Mistral(BaseSDK): - r"""Mistral AI API: Dora OpenAPI schema - - Our Chat Completion and Embeddings APIs specification. 
Create your account on [La Plateforme](https://console.mistral.ai) to get access and read the [docs](https://docs.mistral.ai) to learn how to use it. - """ + r"""Mistral AI API: Our Chat Completion and Embeddings APIs specification. Create your account on [La Plateforme](https://console.mistral.ai) to get access and read the [docs](https://docs.mistral.ai) to learn how to use it.""" models: "Models" r"""Model Management API""" @@ -118,9 +115,9 @@ def __init__( security: Any = None if callable(api_key): # pylint: disable=unnecessary-lambda-assignment - security = lambda: models.Security(api_key=api_key()) + security = lambda: models_.Security(api_key=api_key()) else: - security = models.Security(api_key=api_key) + security = models_.Security(api_key=api_key) if server_url is not None: if url_params is not None: diff --git a/src/mistralai/client/transcriptions.py b/src/mistralai/client/transcriptions.py index f7ef5b0a..7f01917d 100644 --- a/src/mistralai/client/transcriptions.py +++ b/src/mistralai/client/transcriptions.py @@ -2,12 +2,8 @@ # @generated-id: 75b45780c978 from .basesdk import BaseSDK -from mistralai.client import models, utils +from mistralai.client import errors, models, utils from mistralai.client._hooks import HookContext -from mistralai.client.models import ( - file as models_file, - timestampgranularity as models_timestampgranularity, -) from mistralai.client.types import OptionalNullable, UNSET from mistralai.client.utils import eventstreaming, get_security_from_env from mistralai.client.utils.unmarshal_json_response import unmarshal_json_response @@ -21,16 +17,14 @@ def complete( self, *, model: str, - file: Optional[Union[models_file.File, models_file.FileTypedDict]] = None, + file: Optional[Union[models.File, models.FileTypedDict]] = None, file_url: OptionalNullable[str] = UNSET, file_id: OptionalNullable[str] = UNSET, language: OptionalNullable[str] = UNSET, temperature: OptionalNullable[float] = UNSET, diarize: Optional[bool] = False, 
context_bias: Optional[List[str]] = None, - timestamp_granularities: Optional[ - List[models_timestampgranularity.TimestampGranularity] - ] = None, + timestamp_granularities: Optional[List[models.TimestampGranularity]] = None, retries: OptionalNullable[utils.RetryConfig] = UNSET, server_url: Optional[str] = None, timeout_ms: Optional[int] = None, @@ -121,27 +115,25 @@ def complete( return unmarshal_json_response(models.TranscriptionResponse, http_res) if utils.match_response(http_res, "4XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise models.SDKError("Unexpected response received", http_res) + raise errors.SDKError("Unexpected response received", http_res) async def complete_async( self, *, model: str, - file: Optional[Union[models_file.File, models_file.FileTypedDict]] = None, + file: Optional[Union[models.File, models.FileTypedDict]] = None, file_url: OptionalNullable[str] = UNSET, file_id: OptionalNullable[str] = UNSET, language: OptionalNullable[str] = UNSET, temperature: OptionalNullable[float] = UNSET, diarize: Optional[bool] = False, context_bias: Optional[List[str]] = None, - timestamp_granularities: Optional[ - List[models_timestampgranularity.TimestampGranularity] - ] = None, + timestamp_granularities: Optional[List[models.TimestampGranularity]] = None, retries: OptionalNullable[utils.RetryConfig] = UNSET, server_url: Optional[str] = None, timeout_ms: Optional[int] = None, @@ -232,27 +224,25 @@ async def complete_async( return unmarshal_json_response(models.TranscriptionResponse, http_res) if utils.match_response(http_res, "4XX", "*"): http_res_text = await 
utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) - raise models.SDKError("Unexpected response received", http_res) + raise errors.SDKError("Unexpected response received", http_res) def stream( self, *, model: str, - file: Optional[Union[models_file.File, models_file.FileTypedDict]] = None, + file: Optional[Union[models.File, models.FileTypedDict]] = None, file_url: OptionalNullable[str] = UNSET, file_id: OptionalNullable[str] = UNSET, language: OptionalNullable[str] = UNSET, temperature: OptionalNullable[float] = UNSET, diarize: Optional[bool] = False, context_bias: Optional[List[str]] = None, - timestamp_granularities: Optional[ - List[models_timestampgranularity.TimestampGranularity] - ] = None, + timestamp_granularities: Optional[List[models.TimestampGranularity]] = None, retries: OptionalNullable[utils.RetryConfig] = UNSET, server_url: Optional[str] = None, timeout_ms: Optional[int] = None, @@ -352,28 +342,26 @@ def stream( ) if utils.match_response(http_res, "4XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) http_res_text = utils.stream_to_text(http_res) - raise models.SDKError("Unexpected response received", http_res, http_res_text) + raise errors.SDKError("Unexpected response received", http_res, 
http_res_text) async def stream_async( self, *, model: str, - file: Optional[Union[models_file.File, models_file.FileTypedDict]] = None, + file: Optional[Union[models.File, models.FileTypedDict]] = None, file_url: OptionalNullable[str] = UNSET, file_id: OptionalNullable[str] = UNSET, language: OptionalNullable[str] = UNSET, temperature: OptionalNullable[float] = UNSET, diarize: Optional[bool] = False, context_bias: Optional[List[str]] = None, - timestamp_granularities: Optional[ - List[models_timestampgranularity.TimestampGranularity] - ] = None, + timestamp_granularities: Optional[List[models.TimestampGranularity]] = None, retries: OptionalNullable[utils.RetryConfig] = UNSET, server_url: Optional[str] = None, timeout_ms: Optional[int] = None, @@ -473,10 +461,10 @@ async def stream_async( ) if utils.match_response(http_res, "4XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) if utils.match_response(http_res, "5XX", "*"): http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("API error occurred", http_res, http_res_text) + raise errors.SDKError("API error occurred", http_res, http_res_text) http_res_text = await utils.stream_to_text_async(http_res) - raise models.SDKError("Unexpected response received", http_res, http_res_text) + raise errors.SDKError("Unexpected response received", http_res, http_res_text) diff --git a/src/mistralai/client/utils/__init__.py b/src/mistralai/client/utils/__init__.py index 7ed3a420..4bde281a 100644 --- a/src/mistralai/client/utils/__init__.py +++ b/src/mistralai/client/utils/__init__.py @@ -1,15 +1,24 @@ """Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" # @generated-id: b69505f4b269 -from typing import TYPE_CHECKING -from importlib import import_module -import builtins -import sys +from typing import Any, TYPE_CHECKING, Callable, TypeVar +import asyncio + +from .dynamic_imports import lazy_getattr, lazy_dir + +_T = TypeVar("_T") + + +async def run_sync_in_thread(func: Callable[..., _T], *args) -> _T: + """Run a synchronous function in a thread pool to avoid blocking the event loop.""" + return await asyncio.to_thread(func, *args) + if TYPE_CHECKING: from .annotations import get_discriminator from .datetimes import parse_datetime from .enums import OpenEnumMeta + from .unions import parse_open_union from .headers import get_headers, get_response_headers from .metadata import ( FieldMetadata, @@ -79,6 +88,7 @@ "match_response", "MultipartFormMetadata", "OpenEnumMeta", + "parse_open_union", "PathParamMetadata", "QueryParamMetadata", "remove_suffix", @@ -132,6 +142,7 @@ "match_response": ".values", "MultipartFormMetadata": ".metadata", "OpenEnumMeta": ".enums", + "parse_open_union": ".unions", "PathParamMetadata": ".metadata", "QueryParamMetadata": ".metadata", "remove_suffix": ".url", @@ -161,38 +172,11 @@ } -def dynamic_import(modname, retries=3): - for attempt in range(retries): - try: - return import_module(modname, __package__) - except KeyError: - # Clear any half-initialized module and retry - sys.modules.pop(modname, None) - if attempt == retries - 1: - break - raise KeyError(f"Failed to import module '{modname}' after {retries} attempts") - - -def __getattr__(attr_name: str) -> object: - module_name = _dynamic_imports.get(attr_name) - if module_name is None: - raise AttributeError( - f"no {attr_name} found in _dynamic_imports, module name -> {__name__} " - ) - - try: - module = dynamic_import(module_name) - return getattr(module, attr_name) - except ImportError as e: - raise ImportError( - f"Failed to import {attr_name} from {module_name}: {e}" - ) from e - except AttributeError as e: - raise 
AttributeError( - f"Failed to get {attr_name} from {module_name}: {e}" - ) from e +def __getattr__(attr_name: str) -> Any: + return lazy_getattr( + attr_name, package=__package__, dynamic_imports=_dynamic_imports + ) def __dir__(): - lazy_attrs = builtins.list(_dynamic_imports.keys()) - return builtins.sorted(lazy_attrs) + return lazy_dir(dynamic_imports=_dynamic_imports) diff --git a/src/mistralai/client/utils/dynamic_imports.py b/src/mistralai/client/utils/dynamic_imports.py new file mode 100644 index 00000000..969f2fc7 --- /dev/null +++ b/src/mistralai/client/utils/dynamic_imports.py @@ -0,0 +1,55 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" +# @generated-id: ac9918d925c0 + +from importlib import import_module +import builtins +import sys + + +def dynamic_import(package, modname, retries=3): + """Import a module relative to package, retrying on KeyError from half-initialized modules.""" + for attempt in range(retries): + try: + return import_module(modname, package) + except KeyError: + sys.modules.pop(modname, None) + if attempt == retries - 1: + break + raise KeyError(f"Failed to import module '{modname}' after {retries} attempts") + + +def lazy_getattr(attr_name, *, package, dynamic_imports, sub_packages=None): + """Module-level __getattr__ that lazily loads from a dynamic_imports mapping. + + Args: + attr_name: The attribute being looked up. + package: The caller's __package__ (for relative imports). + dynamic_imports: Dict mapping attribute names to relative module paths. + sub_packages: Optional list of subpackage names to lazy-load. 
+ """ + module_name = dynamic_imports.get(attr_name) + if module_name is not None: + try: + module = dynamic_import(package, module_name) + return getattr(module, attr_name) + except ImportError as e: + raise ImportError( + f"Failed to import {attr_name} from {module_name}: {e}" + ) from e + except AttributeError as e: + raise AttributeError( + f"Failed to get {attr_name} from {module_name}: {e}" + ) from e + + if sub_packages and attr_name in sub_packages: + return import_module(f".{attr_name}", package) + + raise AttributeError(f"module '{package}' has no attribute '{attr_name}'") + + +def lazy_dir(*, dynamic_imports, sub_packages=None): + """Module-level __dir__ that lists lazily-loadable attributes.""" + lazy_attrs = builtins.list(dynamic_imports.keys()) + if sub_packages: + lazy_attrs.extend(sub_packages) + return builtins.sorted(lazy_attrs) diff --git a/src/mistralai/client/utils/eventstreaming.py b/src/mistralai/client/utils/eventstreaming.py index 3fe3c7e1..19a12152 100644 --- a/src/mistralai/client/utils/eventstreaming.py +++ b/src/mistralai/client/utils/eventstreaming.py @@ -3,7 +3,9 @@ import re import json +from dataclasses import dataclass, asdict from typing import ( + Any, Callable, Generic, TypeVar, @@ -23,6 +25,7 @@ class EventStream(Generic[T]): client_ref: Optional[object] response: httpx.Response generator: Generator[T, None, None] + _closed: bool def __init__( self, @@ -34,17 +37,21 @@ def __init__( self.response = response self.generator = stream_events(response, decoder, sentinel) self.client_ref = client_ref + self._closed = False def __iter__(self): return self def __next__(self): + if self._closed: + raise StopIteration return next(self.generator) def __enter__(self): return self def __exit__(self, exc_type, exc_val, exc_tb): + self._closed = True self.response.close() @@ -54,6 +61,7 @@ class EventStreamAsync(Generic[T]): client_ref: Optional[object] response: httpx.Response generator: AsyncGenerator[T, None] + _closed: bool def __init__( 
self, @@ -65,33 +73,45 @@ def __init__( self.response = response self.generator = stream_events_async(response, decoder, sentinel) self.client_ref = client_ref + self._closed = False def __aiter__(self): return self async def __anext__(self): + if self._closed: + raise StopAsyncIteration return await self.generator.__anext__() async def __aenter__(self): return self async def __aexit__(self, exc_type, exc_val, exc_tb): + self._closed = True await self.response.aclose() +@dataclass class ServerEvent: id: Optional[str] = None event: Optional[str] = None - data: Optional[str] = None + data: Any = None retry: Optional[int] = None MESSAGE_BOUNDARIES = [ b"\r\n\r\n", - b"\n\n", + b"\r\n\r", + b"\r\n\n", + b"\r\r\n", + b"\n\r\n", b"\r\r", + b"\n\r", + b"\n\n", ] +UTF8_BOM = b"\xef\xbb\xbf" + async def stream_events_async( response: httpx.Response, @@ -100,14 +120,10 @@ async def stream_events_async( ) -> AsyncGenerator[T, None]: buffer = bytearray() position = 0 - discard = False + event_id: Optional[str] = None async for chunk in response.aiter_bytes(): - # We've encountered the sentinel value and should no longer process - # incoming data. Instead we throw new data away until the server closes - # the connection. 
- if discard: - continue - + if len(buffer) == 0 and chunk.startswith(UTF8_BOM): + chunk = chunk[len(UTF8_BOM) :] buffer += chunk for i in range(position, len(buffer)): char = buffer[i : i + 1] @@ -122,15 +138,22 @@ async def stream_events_async( block = buffer[position:i] position = i + len(seq) - event, discard = _parse_event(block, decoder, sentinel) + event, discard, event_id = _parse_event( + raw=block, decoder=decoder, sentinel=sentinel, event_id=event_id + ) if event is not None: yield event + if discard: + await response.aclose() + return if position > 0: buffer = buffer[position:] position = 0 - event, discard = _parse_event(buffer, decoder, sentinel) + event, discard, _ = _parse_event( + raw=buffer, decoder=decoder, sentinel=sentinel, event_id=event_id + ) if event is not None: yield event @@ -142,14 +165,10 @@ def stream_events( ) -> Generator[T, None, None]: buffer = bytearray() position = 0 - discard = False + event_id: Optional[str] = None for chunk in response.iter_bytes(): - # We've encountered the sentinel value and should no longer process - # incoming data. Instead we throw new data away until the server closes - # the connection. 
- if discard: - continue - + if len(buffer) == 0 and chunk.startswith(UTF8_BOM): + chunk = chunk[len(UTF8_BOM) :] buffer += chunk for i in range(position, len(buffer)): char = buffer[i : i + 1] @@ -164,22 +183,33 @@ def stream_events( block = buffer[position:i] position = i + len(seq) - event, discard = _parse_event(block, decoder, sentinel) + event, discard, event_id = _parse_event( + raw=block, decoder=decoder, sentinel=sentinel, event_id=event_id + ) if event is not None: yield event + if discard: + response.close() + return if position > 0: buffer = buffer[position:] position = 0 - event, discard = _parse_event(buffer, decoder, sentinel) + event, discard, _ = _parse_event( + raw=buffer, decoder=decoder, sentinel=sentinel, event_id=event_id + ) if event is not None: yield event def _parse_event( - raw: bytearray, decoder: Callable[[str], T], sentinel: Optional[str] = None -) -> Tuple[Optional[T], bool]: + *, + raw: bytearray, + decoder: Callable[[str], T], + sentinel: Optional[str] = None, + event_id: Optional[str] = None, +) -> Tuple[Optional[T], bool, Optional[str]]: block = raw.decode() lines = re.split(r"\r?\n|\r", block) publish = False @@ -190,13 +220,16 @@ def _parse_event( continue delim = line.find(":") - if delim <= 0: + if delim == 0: continue - field = line[0:delim] - value = line[delim + 1 :] if delim < len(line) - 1 else "" - if len(value) and value[0] == " ": - value = value[1:] + field = line + value = "" + if delim > 0: + field = line[0:delim] + value = line[delim + 1 :] if delim < len(line) - 1 else "" + if len(value) and value[0] == " ": + value = value[1:] if field == "event": event.event = value @@ -205,37 +238,36 @@ def _parse_event( data += value + "\n" publish = True elif field == "id": - event.id = value publish = True + if "\x00" not in value: + event_id = value elif field == "retry": - event.retry = int(value) if value.isdigit() else None + if value.isdigit(): + event.retry = int(value) publish = True + event.id = event_id + if 
sentinel and data == f"{sentinel}\n": - return None, True + return None, True, event_id if data: data = data[:-1] - event.data = data - - data_is_primitive = ( - data.isnumeric() or data == "true" or data == "false" or data == "null" - ) - data_is_json = ( - data.startswith("{") or data.startswith("[") or data.startswith('"') - ) - - if data_is_primitive or data_is_json: - try: - event.data = json.loads(data) - except Exception: - pass + try: + event.data = json.loads(data) + except json.JSONDecodeError: + event.data = data out = None if publish: - out = decoder(json.dumps(event.__dict__)) - - return out, False + out_dict = { + k: v + for k, v in asdict(event).items() + if v is not None or (k == "data" and data) + } + out = decoder(json.dumps(out_dict)) + + return out, False, event_id def _peek_sequence(position: int, buffer: bytearray, sequence: bytes): diff --git a/src/mistralai/client/utils/forms.py b/src/mistralai/client/utils/forms.py index 2b474b9a..6facec53 100644 --- a/src/mistralai/client/utils/forms.py +++ b/src/mistralai/client/utils/forms.py @@ -143,7 +143,7 @@ def serialize_multipart_form( if field_metadata.file: if isinstance(val, List): # Handle array of files - array_field_name = f_name + "[]" + array_field_name = f_name for file_obj in val: if not _is_set(file_obj): continue @@ -186,7 +186,7 @@ def serialize_multipart_form( continue values.append(_val_to_string(value)) - array_field_name = f_name + "[]" + array_field_name = f_name form[array_field_name] = values else: form[f_name] = _val_to_string(val) diff --git a/src/mistralai/client/utils/retries.py b/src/mistralai/client/utils/retries.py index 90c008b0..bea13041 100644 --- a/src/mistralai/client/utils/retries.py +++ b/src/mistralai/client/utils/retries.py @@ -145,12 +145,7 @@ def do_request() -> httpx.Response: if res.status_code == parsed_code: raise TemporaryError(res) - except httpx.ConnectError as exception: - if retries.config.retry_connection_errors: - raise - - raise 
PermanentError(exception) from exception - except httpx.TimeoutException as exception: + except (httpx.NetworkError, httpx.TimeoutException) as exception: if retries.config.retry_connection_errors: raise @@ -194,12 +189,7 @@ async def do_request() -> httpx.Response: if res.status_code == parsed_code: raise TemporaryError(res) - except httpx.ConnectError as exception: - if retries.config.retry_connection_errors: - raise - - raise PermanentError(exception) from exception - except httpx.TimeoutException as exception: + except (httpx.NetworkError, httpx.TimeoutException) as exception: if retries.config.retry_connection_errors: raise diff --git a/src/mistralai/client/utils/security.py b/src/mistralai/client/utils/security.py index 4c73806d..d8b9d8fe 100644 --- a/src/mistralai/client/utils/security.py +++ b/src/mistralai/client/utils/security.py @@ -154,6 +154,8 @@ def _parse_security_scheme_value( elif scheme_type == "http": if sub_type == "bearer": headers[header_name] = _apply_bearer(value) + elif sub_type == "basic": + headers[header_name] = value elif sub_type == "custom": return else: diff --git a/src/mistralai/client/utils/unions.py b/src/mistralai/client/utils/unions.py new file mode 100644 index 00000000..14ef1bd5 --- /dev/null +++ b/src/mistralai/client/utils/unions.py @@ -0,0 +1,33 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" +# @generated-id: d23713342634 + +from typing import Any + +from pydantic import BaseModel, TypeAdapter + + +def parse_open_union( + v: Any, + *, + disc_key: str, + variants: dict[str, Any], + unknown_cls: type, + union_name: str, +) -> Any: + """Parse an open discriminated union value with forward-compatibility. + + Known discriminator values are dispatched to their variant types. + Unknown discriminator values produce an instance of the fallback class, + preserving the raw payload for inspection. 
+ """ + if isinstance(v, BaseModel): + return v + if not isinstance(v, dict) or disc_key not in v: + raise ValueError(f"{union_name}: expected object with '{disc_key}' field") + disc = v[disc_key] + variant_cls = variants.get(disc) + if variant_cls is not None: + if isinstance(variant_cls, type) and issubclass(variant_cls, BaseModel): + return variant_cls.model_validate(v) + return TypeAdapter(variant_cls).validate_python(v) + return unknown_cls(raw=v) diff --git a/src/mistralai/client/utils/unmarshal_json_response.py b/src/mistralai/client/utils/unmarshal_json_response.py index 65190e5c..624433c4 100644 --- a/src/mistralai/client/utils/unmarshal_json_response.py +++ b/src/mistralai/client/utils/unmarshal_json_response.py @@ -6,7 +6,7 @@ import httpx from .serializers import unmarshal_json -from mistralai.client import models +from mistralai.client import errors T = TypeVar("T") @@ -31,7 +31,7 @@ def unmarshal_json_response( try: return unmarshal_json(body, typ) except Exception as e: - raise models.ResponseValidationError( + raise errors.ResponseValidationError( "Response validation failed", http_res, e, diff --git a/src/mistralai/extra/run/context.py b/src/mistralai/extra/run/context.py index 01baa6a9..7ade705f 100644 --- a/src/mistralai/extra/run/context.py +++ b/src/mistralai/extra/run/context.py @@ -22,7 +22,6 @@ create_tool_call, ) from mistralai.client.models import ( - AgentTool, CompletionArgs, CompletionArgsTypedDict, ConversationInputs, @@ -35,6 +34,8 @@ InputEntries, MessageInputEntry, ResponseFormat, + UnknownAgentTool, + UpdateAgentRequestTool, ) from mistralai.client.types.basemodel import BaseModel, OptionalNullable, UNSET @@ -187,8 +188,11 @@ async def prepare_agent_request(self, beta_client: "Beta") -> AgentRequestKwargs ) agent = await beta_client.agents.get_async(agent_id=self.agent_id) agent_tools = agent.tools or [] - updated_tools: list[AgentTool] = [] + updated_tools: list[UpdateAgentRequestTool] = [] for tool in agent_tools: + if 
isinstance(tool, UnknownAgentTool): + # Skip unknown tools - can't include them in update request + continue if not isinstance(tool, FunctionTool): updated_tools.append(tool) elif tool.function.name in self._callable_tools: